Mirror of https://github.com/KevinMidboe/fetch-the-release.git, synced 2025-10-29 17:40:23 +00:00
WIP Searching all request pages, better formatting and error handling
main.py | 120 changed lines
@@ -4,20 +4,35 @@ from redis import Redis
 import pickle
 from urllib.parse import urljoin, quote
 import os
+import sys
 from pprint import pprint
 
-BASE_URL = 'https://api.kevinmidboe.com/'
-AUTHORIZATION_TOKEN = None
-CACHE_FILE = './cache.pickle'
+from dotenv import load_dotenv
+load_dotenv(dotenv_path='.env')
+
+BASE_URL = os.getenv('BASE_URL') or None
+AUTHORIZATION_TOKEN = os.getenv('AUTHORIZATION_TOKEN') or None
 CACHE = Redis(host='localhost', port=6379, db=0)
 
+
+USER = os.getenv('USER') or None
+PASS = os.getenv('PASS') or None
+
+if None in [BASE_URL, USER, PASS]:
+    print('ERROR! Set environment variables, see ./.env-example or README')
+    exit(0)
+
+# - - Cache
+
 def writeObjectToCache(key, obj):
-    print('object to cache', obj)
+    print('saving response with key:', key)
     pickledObj = pickle.dumps(obj)
-    CACHE.set(key, pickledObj)
+    CACHE.set(key, pickledObj, ex=600)
 
 def readCache(key):
     value = CACHE.get(key)
+    if value is None:
+        return None
     return pickle.loads(value)
 
 def flushCache():
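
Note: the new configuration block above reads BASE_URL, AUTHORIZATION_TOKEN, USER and PASS from a .env file, but ./.env-example itself is not part of this diff. A minimal sketch of what it presumably contains (values here are placeholders, not real credentials):

    BASE_URL=https://api.kevinmidboe.com/
    AUTHORIZATION_TOKEN=
    USER=your-seasoned-username
    PASS=your-seasoned-password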
@@ -26,16 +41,36 @@ def flushCache():
 def releaseFromCache(request):
     return False
 
 
+# - - MISC
+
+METRIC_PREFIX_VALUES = {'KB': 1000, 'MB': 1000000, 'GB': 1000000000}
+def humanReadableToBytes(sizeString):
+    [value, prefix] = sizeString.split()
+    byteSize = float(value) * METRIC_PREFIX_VALUES[prefix]
+    return byteSize
+
+
+# - - HTTP API
+
+# TODO Move authentication to happen at the beginning and use the set value throughout;
+# could recheck if a request returns un-authed.
+# Releases should therefore not
 def authenticateSeasoned(username, password):
     global AUTHORIZATION_TOKEN
     uri = urljoin(BASE_URL, '/api/v1/user/login')
     payload = { 'username': username, 'password': password }
-    r = requests.post(uri, data=payload)
-    data = r.json()
-    AUTHORIZATION_TOKEN = data['token']
+    print('VERBOSE | Signing in to page: {}'.format(uri))
+    response = requests.post(uri, data=payload)
+    data = response.json()
+    if response.status_code == requests.codes.ok:
+        AUTHORIZATION_TOKEN = data['token']
+    else:
+        print('ERROR! {}: {}'.format(response.status_code, data['error']))
+        exit(0)
 
-def fetchRequests(pages=1):
-    uri = urljoin(BASE_URL, '/api/v2/request')
+def fetchRequests(page=1):
+    uri = urljoin(BASE_URL, '/api/v2/request?page=' + str(page))
     r = requests.get(uri)
     return r.json()
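
Note: the new humanReadableToBytes helper assumes a size string of the form '<value> <unit>', with a single space and a unit present in METRIC_PREFIX_VALUES. A quick sketch of the expected behaviour (not part of the commit):

    humanReadableToBytes('1.4 GB')   # -> 1400000000.0
    humanReadableToBytes('734 MB')   # -> 734000000.0
    humanReadableToBytes('2 TB')     # -> KeyError: 'TB' is not in METRIC_PREFIX_VALUES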
@@ -46,11 +81,12 @@ def releasesFromRequest(request):
         return cacheHit
     headers = { 'authorization': AUTHORIZATION_TOKEN }
 
+    print('VERBOSE | Searching for releases at {} with auth token: {}'.format(uri, AUTHORIZATION_TOKEN))
     r = requests.get(uri, headers=headers)
 
     if r.status_code == requests.codes.unauthorized:
-        print('uath')
-        authenticateSeasoned('kevin', 'test123')
+        print('Unauthorized. Signing in as {}'.format(USER))
+        authenticateSeasoned(USER, PASS)
         releasesFromRequest(request)
         return
 
@@ -61,25 +97,67 @@ def releasesFromRequest(request):
 
     else:
         return None
 
 
+# - - FORMATTING
+
+def printReleases(releases):
+    if len(releases) == 0:
+        print('No releases found')
+        return None
+
+    releases.sort(key=lambda x: humanReadableToBytes(x['size']), reverse=True)
+    for release in releases:
+        print('{:80} | {}\t | {}'.format(release['name'], release['size'], release['seed']))
+
+
+allReleases = []
+def takePageGetRequestsAndReleases(page=1):
+    global allReleases
+    requests = fetchRequests(page)
+    results = requests['results']
+    totalPages = requests['total_pages']
+
+    for request in results:
+        print('Finding torrent for:', request['title'])
+        releases = releasesFromRequest(request)
+        if releases:
+            printReleases(releases['results'])
+            allReleases.append({'req': request, 'rel': releases})
+
+    if totalPages - page > 0:
+        print('More pages to index, moving to page:', page + 1)
+        takePageGetRequestsAndReleases(page + 1)
+    return allReleases
 
 def main():
+    print('Fetching all requested movies and shows..')
+    TwentyOneForever = takePageGetRequestsAndReleases()
+    exit(0)
+
+
     requests = fetchRequests()
     results = requests['results']
+    currentPage = requests['page']
     totalPages = requests['total_pages']
 
     mediaWithReleases = []
 
     for result in results:
+        print('Finding torrents for:', result['title'])
         releases = releasesFromRequest(result)
-        mediaWithReleases.append({'rel': releases, 'media': result})
-    pprint(mediaWithReleases[:3])
+        if releases:
+            printReleases(releases['results'])
 
-    for l in mediaWithReleases[:3]:
-        if len(l['rel']['results']) > 0:
-            print(l['rel']['results'][0]['release_type'])
+            mediaWithReleases.append({'rel': releases, 'media': result})
+
+    # pprint(mediaWithReleases[:5])
 
-    print(totalPages)
+    print(type(totalPages))
+    print(type(currentPage))
+    pagesLeft = totalPages - currentPage
+    print('pages left:', pagesLeft)
 
 
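
Note: the pagination logic in takePageGetRequestsAndReleases and main assumes /api/v2/request returns JSON with results, page and total_pages fields, and printReleases assumes each release carries name, size and seed. The exact schema is not shown in this commit; hypothetical objects of roughly this shape would satisfy the code:

    # Shapes inferred from the fields the new code reads; all values are made up.
    example_request_page = {
        'results': [{'title': 'Blade Runner 2049'}],  # each entry is passed to releasesFromRequest()
        'page': 1,
        'total_pages': 3,
    }
    example_release = {'name': 'Blade.Runner.2049.2017.1080p.WEB-DL', 'size': '1.4 GB', 'seed': 52}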