WIP Searching all request pages, better formatting and error handling

2019-07-24 08:24:21 +02:00
parent 5394d45918
commit 7bb4638714

main.py (120 changed lines)

@@ -4,20 +4,35 @@ from redis import Redis
 import pickle
 from urllib.parse import urljoin, quote
 import os
+import sys
+from pprint import pprint
-BASE_URL = 'https://api.kevinmidboe.com/'
-AUTHORIZATION_TOKEN = None
-CACHE_FILE = './cache.pickle'
+from dotenv import load_dotenv
+load_dotenv(dotenv_path='.env')
+
+BASE_URL = os.getenv('BASE_URL') or None
+AUTHORIZATION_TOKEN = os.getenv('AUTHORIZATION_TOKEN') or None
 CACHE = Redis(host='localhost', port=6379, db=0)
+USER = os.getenv('USER') or None
+PASS = os.getenv('PASS') or None
+
+if None in [BASE_URL, USER, PASS]:
+    print('ERROR! Set environment variables, see ./.env-example or README')
+    exit(0)

 # - - Cache
 def writeObjectToCache(key, obj):
-    print('object to cache', obj)
+    print('saving response with key:', key)
     pickledObj = pickle.dumps(obj)
-    CACHE.set(key, pickledObj)
+    CACHE.set(key, pickledObj, ex=600)

 def readCache(key):
     value = CACHE.get(key)
     if value is None:
         return None
     return pickle.loads(value)

 def flushCache():
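For context, a minimal standalone sketch of the pickle-plus-Redis pattern the hunk above settles on (the new ex=600 makes every cached entry expire after ten minutes). It assumes a Redis instance on localhost:6379; the key and payload below are made up for illustration.

import pickle
from redis import Redis

cache = Redis(host='localhost', port=6379, db=0)

def write_object_to_cache(key, obj, ttl=600):
    # Serialize with pickle and let Redis drop the entry after `ttl` seconds.
    cache.set(key, pickle.dumps(obj), ex=ttl)

def read_cache(key):
    # None on a miss, otherwise the unpickled object.
    value = cache.get(key)
    return pickle.loads(value) if value is not None else None

write_object_to_cache('request:some-movie', {'title': 'Some Movie', 'page': 1})
print(read_cache('request:some-movie'))  # hit, as long as it is read within 600 s
print(read_cache('missing-key'))         # -> None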
@@ -26,16 +41,36 @@ def flushCache():
 def releaseFromCache(request):
     return False

 # - - MISC
+METRIC_PREFIX_VALUES = {'KB': 1000, 'MB': 1000000, 'GB': 1000000000}
+
+def humanReadableToBytes(sizeString):
+    [value, prefix] = sizeString.split()
+    byteSize = float(value) * METRIC_PREFIX_VALUES[prefix]
+    return byteSize

 # - - HTTP API
+# TODO Move authentication to happen at the beginning and use the set value throughout,
+#      could recheck if a request returns un-authed.
+#      Releases should therefore not
 def authenticateSeasoned(username, password):
     global AUTHORIZATION_TOKEN
     uri = urljoin(BASE_URL, '/api/v1/user/login')
     payload = { 'username': username, 'password': password }
-    r = requests.post(uri, data=payload)
-    data = r.json()
-    AUTHORIZATION_TOKEN = data['token']
-def fetchRequests(pages=1):
-    uri = urljoin(BASE_URL, '/api/v2/request')
+    print('VERBOSE | Signing in to page: {}'.format(uri))
+    response = requests.post(uri, data=payload)
+    data = response.json()
+
+    if response.status_code == requests.codes.ok:
+        AUTHORIZATION_TOKEN = data['token']
+    else:
+        print('ERROR! {}: {}'.format(response.status_code, data['error']))
+        exit(0)
+
+def fetchRequests(page=1):
+    uri = urljoin(BASE_URL, '/api/v2/request?page=' + str(page))
     r = requests.get(uri)
     return r.json()
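A quick illustration of what the new humanReadableToBytes helper is for: it turns "<number> <KB|MB|GB>" size strings into a sortable byte count, which the updated printReleases later in this diff uses to sort releases largest-first. The release list here is invented.

METRIC_PREFIX_VALUES = {'KB': 1000, 'MB': 1000000, 'GB': 1000000000}

def human_readable_to_bytes(size_string):
    # '1.4 GB' -> 1400000000.0 (expects "<number> <KB|MB|GB>")
    value, prefix = size_string.split()
    return float(value) * METRIC_PREFIX_VALUES[prefix]

releases = [{'name': 'a', 'size': '700 MB'},
            {'name': 'b', 'size': '1.4 GB'},
            {'name': 'c', 'size': '350 KB'}]
releases.sort(key=lambda r: human_readable_to_bytes(r['size']), reverse=True)
print([r['name'] for r in releases])  # ['b', 'a', 'c']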
@@ -46,11 +81,12 @@ def releasesFromRequest(request):
         return cacheHit

     headers = { 'authorization': AUTHORIZATION_TOKEN }
+    print('VERBOSE | Searching for releases at {} with auth token: {}'.format(uri, AUTHORIZATION_TOKEN))
     r = requests.get(uri, headers=headers)

     if r.status_code == requests.codes.unauthorized:
-        print('uath')
-        authenticateSeasoned('kevin', 'test123')
+        print('unauthed. Signing in as {}'.format(USER))
+        authenticateSeasoned(USER, PASS)
         releasesFromRequest(request)
         return
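The 401 handling above (now signing in with USER/PASS instead of hard-coded credentials) calls releasesFromRequest(request) again after re-authenticating, but the retried result is discarded by the bare return, so the caller still sees None even when the retry succeeded. A generic sketch of a retry-once pattern that does return the second attempt; the token handling is a placeholder, not the project's real flow.

import requests

TOKEN = None

def authenticate():
    # Stand-in for authenticateSeasoned(); the only point here is that
    # TOKEN gets refreshed before the retry.
    global TOKEN
    TOKEN = 'fresh-token'

def get_with_reauth(uri, retried=False):
    # Retry exactly once on 401, and return whatever the retry produced.
    r = requests.get(uri, headers={'authorization': TOKEN})
    if r.status_code == requests.codes.unauthorized and not retried:
        authenticate()
        return get_with_reauth(uri, retried=True)
    return r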
@@ -61,25 +97,67 @@ def releasesFromRequest(request):
     else:
         return None

 # - - FORMATTING
 def printReleases(releases):
     if len(releases) == 0:
         print('No releases found')
         return None

+    releases.sort(key=lambda x: humanReadableToBytes(x['size']), reverse=True)
     for release in releases:
         print('{:80} | {}\t | {}'.format(release['name'], release['size'], release['seed']))

+allReleases = []
+def takePageGetRequestsAndReleases(page=1):
+    global allReleases
+    requests = fetchRequests(page)
+    results = requests['results']
+    totalPages = requests['total_pages']
+
+    for request in results:
+        print('Finding torrent for:', request['title'])
+        releases = releasesFromRequest(request)
+        if releases:
+            printReleases(releases['results'])
+        allReleases.append({'req': request, 'rel': releases})
+
+    if totalPages - page > 0:
+        print('More pages to index, moving to page:', page + 1)
+        takePageGetRequestsAndReleases(page + 1)
+
+    return allReleases

 def main():
+    print('Fetching all requested movies and shows..')
+    TwentyOneForever = takePageGetRequestsAndReleases()
+    exit(0)
+
     requests = fetchRequests()
     results = requests['results']
+    currentPage = requests['page']
     totalPages = requests['total_pages']

     mediaWithReleases = []
     for result in results:
         print('Finding torrents for:', result['title'])
         releases = releasesFromRequest(result)
-        mediaWithReleases.append({'rel': releases, 'media': result})
-        pprint(mediaWithReleases[:3])
         if releases:
             printReleases(releases['results'])
-    for l in mediaWithReleases[:3]:
-        if len(l['rel']['results']) > 0:
-            print(l['rel']['results'][0]['release_type'])
-    print(totalPages)
+        mediaWithReleases.append({'rel': releases, 'media': result})
+        # pprint(mediaWithReleases[:5])
+
+    print(type(totalPages))
+    print(type(currentPage))
+    pagesLeft = totalPages - currentPage
+    print('pages left:', pagesLeft)
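takePageGetRequestsAndReleases walks the request pages by recursing once per page and accumulating into the module-level allReleases list. Below is an alternative sketch of the same walk done iteratively with a local list; fetch_page and the fake three-page responses are stand-ins for fetchRequests and the real /api/v2/request?page=N payload.

def collect_all_requests(fetch_page):
    # Iterative page walk: fetch_page(page) must return
    # {'results': [...], 'total_pages': N}, like fetchRequests() above.
    all_results = []
    page = 1
    while True:
        data = fetch_page(page)
        all_results.extend(data['results'])
        if page >= data['total_pages']:
            return all_results
        page += 1

# Tiny fake paginated API so the walk can be exercised without the network.
fake_pages = {
    1: {'results': ['a', 'b'], 'total_pages': 3},
    2: {'results': ['c'], 'total_pages': 3},
    3: {'results': ['d'], 'total_pages': 3},
}
print(collect_all_requests(lambda page: fake_pages[page]))  # ['a', 'b', 'c', 'd']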