HTTP utils now support logging on our main logger, torrentSearch. Changed indentation and added better error handling for request.urlopen, which is the function that fetches data from the internet.
This commit is contained in:
@@ -1,38 +1,43 @@
|
|||||||
#!/usr/bin/env python3.6
|
#!/usr/bin/env python3.6
|
||||||
|
|
||||||
|
import logging
|
||||||
|
import sys
|
||||||
|
|
||||||
from urllib import parse, request
|
from urllib import parse, request
|
||||||
from urllib.error import URLError
|
from urllib.error import URLError
|
||||||
import logging
|
|
||||||
|
logger = logging.getLogger('torrentSearch')
|
||||||
|
|
||||||
def build_url(ssl, baseUrl, path, args_dict=None):
    """Assemble a complete URL from its parts.

    Args:
        ssl: truthy selects the 'https' scheme, falsy selects 'http'.
        baseUrl: base URL whose network location (host) is kept.
        path: URL path — either a string, or a list of segments that
            are joined with '/'.
        args_dict: optional query parameters (a dict or a sequence of
            key/value pairs) encoded into the query string. Defaults to
            no parameters.

    Returns:
        The assembled URL as a string.
    """
    # None sentinel instead of the old mutable default ([] is a single
    # shared object across calls and could be mutated by a caller).
    if args_dict is None:
        args_dict = {}
    url_parts = list(parse.urlparse(baseUrl))
    url_parts[0] = 'https' if ssl else 'http'
    # isinstance also accepts list subclasses, unlike `type(...) is list`.
    if isinstance(path, list):
        url_parts[2] = '/'.join(path)
    else:
        url_parts[2] = path
    url_parts[4] = parse.urlencode(args_dict)
    return parse.urlunparse(url_parts)
||||||
|
|
||||||
def convert_query_to_percent_encoded_octets(input_query):
    """Convert a query string or list of words to a percent-encoded string.

    This encodes information for use in a Uniform Resource Identifier
    (URI) via urllib. A list is first joined with single spaces, then
    quoted, so ['foo', 'bar'] becomes 'foo%20bar'.

    Args:
        input_query: the query as a string, or a list of string tokens.

    Returns:
        The percent-encoded query string.
    """
    # isinstance also accepts list subclasses, unlike `type(...) is list`.
    if isinstance(input_query, list):
        input_query = ' '.join(input_query)
    return parse.quote(input_query)
|
||||||
|
|
||||||
def fetch_url(url):
    """Fetch *url* and return the open HTTP response object.

    A browser-like User-Agent header is sent because the default urllib
    agent is rejected by some servers. On failure the error is logged on
    the 'torrentSearch' logger and the process exits.

    Args:
        url: fully-formed URL string to retrieve.

    Returns:
        The response object returned by request.urlopen.
    """
    logger.debug('Fetching query: {}'.format(url))
    req = request.Request(url, headers={'User-Agent': 'Mozilla/5.0'})
    try:
        response = request.urlopen(req, timeout=10)
        return response
    except URLError as e:
        # HTTPError (a URLError subclass) carries both 'code' and 'reason',
        # so the 'code' branch must come first or it is unreachable and
        # HTTP status codes are never reported.
        if hasattr(e, 'code'):
            logger.error('The server couldn\'t fulfill the request.')
            # Lazy %-formatting: the old call passed e.code with no
            # placeholder, which made logging raise (and swallow) a
            # formatting error instead of printing the code.
            logger.error('Error code: %s', e.code)
        elif hasattr(e, 'reason'):
            logger.error('We failed to reach a server with request: %s' % req.full_url)
            logger.error('Reason: %s' % e.reason)
        sys.exit()
|
|||||||
Reference in New Issue
Block a user