Started to create a small Flask RESTful API for handling requests

2017-01-31 21:44:42 +01:00
parent 261b3afa01
commit b7aa452911
871 changed files with 118083 additions and 6 deletions
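The file excerpts below appear to be test files from the bundled wheel package rather than the Flask code the commit message describes; the API itself is not shown in this excerpt. For context only, a minimal sketch of the kind of endpoint such a commit might introduce could look like the following (the module layout, the /requests route, and the in-memory store are assumptions, not code from this commit):

# Hypothetical sketch only -- not part of this commit's diff.
from flask import Flask, jsonify, request

app = Flask(__name__)
_store = []  # assumed in-memory storage for submitted requests

@app.route('/requests', methods=['GET'])
def list_requests():
    # Return everything submitted so far.
    return jsonify({'requests': _store})

@app.route('/requests', methods=['POST'])
def create_request():
    # Accept a JSON payload and remember it.
    payload = request.get_json(force=True)
    _store.append(payload)
    return jsonify(payload), 201

if __name__ == '__main__':
    app.run(debug=True)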


@@ -0,0 +1 @@
#


@@ -0,0 +1,2 @@
def main():
    return


@@ -0,0 +1,30 @@
from setuptools import setup

try:
    unicode
    def u8(s):
        return s.decode('unicode-escape')
except NameError:
    def u8(s):
        return s

setup(name='complex-dist',
      version='0.1',
      description=u8('Another testing distribution \N{SNOWMAN}'),
      long_description=u8('Another testing distribution \N{SNOWMAN}'),
      author="Illustrious Author",
      author_email="illustrious@example.org",
      url="http://example.org/exemplary",
      packages=['complexdist'],
      setup_requires=["wheel", "setuptools"],
      install_requires=["quux", "splort"],
      extras_require={'simple': ['simple.dist']},
      tests_require=["foo", "bar>=10.0.0"],
      entry_points={
          'console_scripts': [
              'complex-dist=complexdist:main',
              'complex-dist2=complexdist:main',
          ],
      },
      )


@@ -0,0 +1,16 @@
from setuptools import setup

try:
    unicode
    def u8(s):
        return s.decode('unicode-escape').encode('utf-8')
except NameError:
    def u8(s):
        return s.encode('utf-8')

setup(name='headers.dist',
      version='0.1',
      description=u8('A distribution with headers'),
      headers=['header.h']
      )


@@ -0,0 +1,362 @@
{
  "id": "http://www.python.org/dev/peps/pep-0426/",
  "$schema": "http://json-schema.org/draft-04/schema#",
  "title": "Metadata for Python Software Packages 2.0",
  "type": "object",
  "properties": {
    "metadata_version": {
      "description": "Version of the file format",
      "type": "string",
      "pattern": "^(\\d+(\\.\\d+)*)$"
    },
    "generator": {
      "description": "Name and version of the program that produced this file.",
      "type": "string",
      "pattern": "^[0-9A-Za-z]([0-9A-Za-z_.-]*[0-9A-Za-z])( \\(.*\\))?$"
    },
    "name": {
      "description": "The name of the distribution.",
      "type": "string",
      "$ref": "#/definitions/distribution_name"
    },
    "version": {
      "description": "The distribution's public version identifier",
      "type": "string",
      "pattern": "^(\\d+(\\.\\d+)*)((a|b|c|rc)(\\d+))?(\\.(post)(\\d+))?(\\.(dev)(\\d+))?$"
    },
    "source_label": {
      "description": "A constrained identifying text string",
      "type": "string",
      "pattern": "^[0-9a-z_.-+]+$"
    },
    "source_url": {
      "description": "A string containing a full URL where the source for this specific version of the distribution can be downloaded.",
      "type": "string",
      "format": "uri"
    },
    "summary": {
      "description": "A one-line summary of what the distribution does.",
      "type": "string"
    },
    "extras": {
      "description": "A list of optional sets of dependencies that may be used to define conditional dependencies in \"may_require\" and similar fields.",
      "type": "array",
      "items": {
        "type": "string",
        "$ref": "#/definitions/extra_name"
      }
    },
    "meta_requires": {
      "description": "A list of subdistributions made available through this metadistribution.",
      "type": "array",
      "$ref": "#/definitions/dependencies"
    },
    "run_requires": {
      "description": "A list of other distributions needed to run this distribution.",
      "type": "array",
      "$ref": "#/definitions/dependencies"
    },
    "test_requires": {
      "description": "A list of other distributions needed when this distribution is tested.",
      "type": "array",
      "$ref": "#/definitions/dependencies"
    },
    "build_requires": {
      "description": "A list of other distributions needed when this distribution is built.",
      "type": "array",
      "$ref": "#/definitions/dependencies"
    },
    "dev_requires": {
      "description": "A list of other distributions needed when this distribution is developed.",
      "type": "array",
      "$ref": "#/definitions/dependencies"
    },
    "provides": {
      "description": "A list of strings naming additional dependency requirements that are satisfied by installing this distribution. These strings must be of the form Name or Name (Version)",
      "type": "array",
      "items": {
        "type": "string",
        "$ref": "#/definitions/provides_declaration"
      }
    },
    "modules": {
      "description": "A list of modules and/or packages available for import after installing this distribution.",
      "type": "array",
      "items": {
        "type": "string",
        "$ref": "#/definitions/qualified_name"
      }
    },
    "namespaces": {
      "description": "A list of namespace packages this distribution contributes to",
      "type": "array",
      "items": {
        "type": "string",
        "$ref": "#/definitions/qualified_name"
      }
    },
    "obsoleted_by": {
      "description": "A string that indicates that this project is no longer being developed. The named project provides a substitute or replacement.",
      "type": "string",
      "$ref": "#/definitions/requirement"
    },
    "supports_environments": {
      "description": "A list of strings specifying the environments that the distribution explicitly supports.",
      "type": "array",
      "items": {
        "type": "string",
        "$ref": "#/definitions/environment_marker"
      }
    },
    "install_hooks": {
      "description": "The install_hooks field is used to define various operations that may be invoked on a distribution in a platform independent manner.",
      "type": "object",
      "properties": {
        "postinstall": {
          "type": "string",
          "$ref": "#/definitions/export_specifier"
        },
        "preuninstall": {
          "type": "string",
          "$ref": "#/definitions/export_specifier"
        }
      }
    },
    "extensions": {
      "description": "Extensions to the metadata may be present in a mapping under the 'extensions' key.",
      "type": "object",
      "$ref": "#/definitions/extensions"
    }
  },
  "required": ["metadata_version", "name", "version", "summary"],
  "additionalProperties": false,
  "definitions": {
    "contact": {
      "type": "object",
      "properties": {
        "name": {
          "type": "string"
        },
        "email": {
          "type": "string"
        },
        "url": {
          "type": "string"
        },
        "role": {
          "type": "string"
        }
      },
      "required": ["name"],
      "additionalProperties": false
    },
    "dependencies": {
      "type": "array",
      "items": {
        "type": "object",
        "$ref": "#/definitions/dependency"
      }
    },
    "dependency": {
      "type": "object",
      "properties": {
        "extra": {
          "type": "string",
          "$ref": "#/definitions/extra_name"
        },
        "environment": {
          "type": "string",
          "$ref": "#/definitions/environment_marker"
        },
        "requires": {
          "type": "array",
          "items": {
            "type": "string",
            "$ref": "#/definitions/requirement"
          }
        }
      },
      "required": ["requires"],
      "additionalProperties": false
    },
    "extensions": {
      "type": "object",
      "patternProperties": {
        "^[A-Za-z][0-9A-Za-z_]*([.][0-9A-Za-z_]*)*$": {}
      },
      "properties": {
        "python.details": {
          "description": "More information regarding the distribution.",
          "type": "object",
          "properties": {
            "document_names": {
              "description": "Names of supporting metadata documents",
              "type": "object",
              "properties": {
                "description": {
                  "type": "string",
                  "$ref": "#/definitions/document_name"
                },
                "changelog": {
                  "type": "string",
                  "$ref": "#/definitions/document_name"
                },
                "license": {
                  "type": "string",
                  "$ref": "#/definitions/document_name"
                }
              },
              "additionalProperties": false
            },
            "keywords": {
              "description": "A list of additional keywords to be used to assist searching for the distribution in a larger catalog.",
              "type": "array",
              "items": {
                "type": "string"
              }
            },
            "license": {
              "description": "A string indicating the license covering the distribution.",
              "type": "string"
            },
            "classifiers": {
              "description": "A list of strings, with each giving a single classification value for the distribution.",
              "type": "array",
              "items": {
                "type": "string"
              }
            }
          }
        },
        "python.project": {
          "description": "More information regarding the creation and maintenance of the distribution.",
          "$ref": "#/definitions/project_or_integrator"
        },
        "python.integrator": {
          "description": "More information regarding the downstream redistributor of the distribution.",
          "$ref": "#/definitions/project_or_integrator"
        },
        "python.commands": {
          "description": "Command line interfaces provided by this distribution",
          "type": "object",
          "$ref": "#/definitions/commands"
        },
        "python.exports": {
          "description": "Other exported interfaces provided by this distribution",
          "type": "object",
          "$ref": "#/definitions/exports"
        }
      },
      "additionalProperties": false
    },
    "commands": {
      "type": "object",
      "properties": {
        "wrap_console": {
          "type": "object",
          "$ref": "#/definitions/command_map"
        },
        "wrap_gui": {
          "type": "object",
          "$ref": "#/definitions/command_map"
        },
        "prebuilt": {
          "type": "array",
          "items": {
            "type": "string",
            "$ref": "#/definitions/relative_path"
          }
        }
      },
      "additionalProperties": false
    },
    "exports": {
      "type": "object",
      "patternProperties": {
        "^[A-Za-z][0-9A-Za-z_]*([.][0-9A-Za-z_]*)*$": {
          "type": "object",
          "patternProperties": {
            ".": {
              "type": "string",
              "$ref": "#/definitions/export_specifier"
            }
          },
          "additionalProperties": false
        }
      },
      "additionalProperties": false
    },
    "command_map": {
      "type": "object",
      "patternProperties": {
        "^[0-9A-Za-z]([0-9A-Za-z_.-]*[0-9A-Za-z])?$": {
          "type": "string",
          "$ref": "#/definitions/export_specifier"
        }
      },
      "additionalProperties": false
    },
    "project_or_integrator": {
      "type": "object",
      "properties": {
        "contacts": {
          "description": "A list of contributor entries giving the recommended contact points for getting more information about the project.",
          "type": "array",
          "items": {
            "type": "object",
            "$ref": "#/definitions/contact"
          }
        },
        "contributors": {
          "description": "A list of contributor entries for other contributors not already listed as current project points of contact.",
          "type": "array",
          "items": {
            "type": "object",
            "$ref": "#/definitions/contact"
          }
        },
        "project_urls": {
          "description": "A mapping of arbitrary text labels to additional URLs relevant to the project.",
          "type": "object"
        }
      }
    },
    "distribution_name": {
      "type": "string",
      "pattern": "^[0-9A-Za-z]([0-9A-Za-z_.-]*[0-9A-Za-z])?$"
    },
    "requirement": {
      "type": "string"
    },
    "provides_declaration": {
      "type": "string"
    },
    "environment_marker": {
      "type": "string"
    },
    "document_name": {
      "type": "string"
    },
    "extra_name": {
      "type": "string",
      "pattern": "^[0-9A-Za-z]([0-9A-Za-z_.-]*[0-9A-Za-z])?$"
    },
    "relative_path": {
      "type": "string"
    },
    "export_specifier": {
      "type": "string",
      "pattern": "^([A-Za-z_][A-Za-z_0-9]*([.][A-Za-z_][A-Za-z_0-9]*)*)(:[A-Za-z_][A-Za-z_0-9]*([.][A-Za-z_][A-Za-z_0-9]*)*)?(\\[[0-9A-Za-z]([0-9A-Za-z_.-]*[0-9A-Za-z])?\\])?$"
    },
    "qualified_name": {
      "type": "string",
      "pattern": "^[A-Za-z_][A-Za-z_0-9]*([.][A-Za-z_][A-Za-z_0-9]*)*$"
    },
    "prefixed_name": {
      "type": "string",
      "pattern": "^[A-Za-z_][A-Za-z_0-9]*([.][A-Za-z_0-9]*)*$"
    }
  }
}
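This schema is exercised by test_pydist() in the test file further down, which validates each built wheel's metadata.json against it with the jsonschema package. A minimal standalone sketch of the same check follows; the pydist-schema.json path and the example metadata values are assumptions made for illustration:

# Sketch: validate a minimal metadata mapping against the schema above,
# assuming it has been saved next to this script as pydist-schema.json.
import json
import jsonschema

with open('pydist-schema.json', 'rb') as f:
    schema = json.loads(f.read().decode('utf-8'))

metadata = {
    "metadata_version": "2.0",
    "name": "example-dist",
    "version": "0.1",
    "summary": "An example document used only to exercise the schema.",
}

# Raises jsonschema.ValidationError if the document does not conform.
jsonschema.validate(metadata, schema)
print("metadata validates against the PEP 426 schema")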


@@ -0,0 +1,17 @@
from setuptools import setup

try:
    unicode
    def u8(s):
        return s.decode('unicode-escape').encode('utf-8')
except NameError:
    def u8(s):
        return s.encode('utf-8')

setup(name='simple.dist',
      version='0.1',
      description=u8('A testing distribution \N{SNOWMAN}'),
      packages=['simpledist'],
      extras_require={'voting': ['beaglevote']},
      )


@@ -0,0 +1,176 @@
"""
Basic wheel tests.
"""

import os
import pkg_resources
import json
import sys
from pkg_resources import resource_filename

import wheel.util
import wheel.tool

from wheel import egg2wheel
from wheel.install import WheelFile
from zipfile import ZipFile
from shutil import rmtree

test_distributions = ("complex-dist", "simple.dist", "headers.dist")

def teardown_module():
    """Delete eggs/wheels created by tests."""
    base = pkg_resources.resource_filename('wheel.test', '')
    for dist in test_distributions:
        for subdir in ('build', 'dist'):
            try:
                rmtree(os.path.join(base, dist, subdir))
            except OSError:
                pass

def setup_module():
    build_wheel()
    build_egg()

def build_wheel():
    """Build wheels from test distributions."""
    for dist in test_distributions:
        pwd = os.path.abspath(os.curdir)
        distdir = pkg_resources.resource_filename('wheel.test', dist)
        os.chdir(distdir)
        try:
            sys.argv = ['', 'bdist_wheel']
            exec(compile(open('setup.py').read(), 'setup.py', 'exec'))
        finally:
            os.chdir(pwd)

def build_egg():
    """Build eggs from test distributions."""
    for dist in test_distributions:
        pwd = os.path.abspath(os.curdir)
        distdir = pkg_resources.resource_filename('wheel.test', dist)
        os.chdir(distdir)
        try:
            sys.argv = ['', 'bdist_egg']
            exec(compile(open('setup.py').read(), 'setup.py', 'exec'))
        finally:
            os.chdir(pwd)

def test_findable():
    """Make sure pkg_resources can find us."""
    assert pkg_resources.working_set.by_key['wheel'].version

def test_egg_re():
    """Make sure egg_info_re matches."""
    egg_names = open(pkg_resources.resource_filename('wheel', 'eggnames.txt'))
    for line in egg_names:
        line = line.strip()
        if not line:
            continue
        assert egg2wheel.egg_info_re.match(line), line

def test_compatibility_tags():
    """Test compatibility tags are working."""
    wf = WheelFile("package-1.0.0-cp32.cp33-noabi-noarch.whl")
    assert (list(wf.compatibility_tags) ==
            [('cp32', 'noabi', 'noarch'), ('cp33', 'noabi', 'noarch')])
    assert (wf.arity == 2)

    wf2 = WheelFile("package-1.0.0-1st-cp33-noabi-noarch.whl")
    wf2_info = wf2.parsed_filename.groupdict()
    assert wf2_info['build'] == '1st', wf2_info

def test_convert_egg():
    base = pkg_resources.resource_filename('wheel.test', '')
    for dist in test_distributions:
        distdir = os.path.join(base, dist, 'dist')
        eggs = [e for e in os.listdir(distdir) if e.endswith('.egg')]
        wheel.tool.convert(eggs, distdir, verbose=False)

def test_unpack():
    """
    Make sure 'wheel unpack' works.
    This also verifies the integrity of our testing wheel files.
    """
    for dist in test_distributions:
        distdir = pkg_resources.resource_filename('wheel.test',
                                                  os.path.join(dist, 'dist'))
        for wheelfile in (w for w in os.listdir(distdir) if w.endswith('.whl')):
            wheel.tool.unpack(os.path.join(distdir, wheelfile), distdir)

def test_no_scripts():
    """Make sure entry point scripts are not generated."""
    dist = "complex-dist"
    basedir = pkg_resources.resource_filename('wheel.test', dist)
    for (dirname, subdirs, filenames) in os.walk(basedir):
        for filename in filenames:
            if filename.endswith('.whl'):
                whl = ZipFile(os.path.join(dirname, filename))
                for entry in whl.infolist():
                    assert not '.data/scripts/' in entry.filename

def test_pydist():
    """Make sure pydist.json exists and validates against our schema."""
    # XXX this test may need manual cleanup of older wheels
    import jsonschema

    def open_json(filename):
        return json.loads(open(filename, 'rb').read().decode('utf-8'))

    pymeta_schema = open_json(resource_filename('wheel.test',
                                                'pydist-schema.json'))
    valid = 0
    for dist in ("simple.dist", "complex-dist"):
        basedir = pkg_resources.resource_filename('wheel.test', dist)
        for (dirname, subdirs, filenames) in os.walk(basedir):
            for filename in filenames:
                if filename.endswith('.whl'):
                    whl = ZipFile(os.path.join(dirname, filename))
                    for entry in whl.infolist():
                        if entry.filename.endswith('/metadata.json'):
                            pymeta = json.loads(whl.read(entry).decode('utf-8'))
                            jsonschema.validate(pymeta, pymeta_schema)
                            valid += 1
    assert valid > 0, "No metadata.json found"

def test_util():
    """Test functions in util.py."""
    for i in range(10):
        before = b'*' * i
        encoded = wheel.util.urlsafe_b64encode(before)
        assert not encoded.endswith(b'=')
        after = wheel.util.urlsafe_b64decode(encoded)
        assert before == after

def test_pick_best():
    """Test the wheel ranking algorithm."""
    def get_tags(res):
        info = res[-1].parsed_filename.groupdict()
        return info['pyver'], info['abi'], info['plat']

    cand_tags = [('py27', 'noabi', 'noarch'), ('py26', 'noabi', 'noarch'),
                 ('cp27', 'noabi', 'linux_i686'),
                 ('cp26', 'noabi', 'linux_i686'),
                 ('cp27', 'noabi', 'linux_x86_64'),
                 ('cp26', 'noabi', 'linux_x86_64')]
    cand_wheels = [WheelFile('testpkg-1.0-%s-%s-%s.whl' % t)
                   for t in cand_tags]

    supported = [('cp27', 'noabi', 'linux_i686'), ('py27', 'noabi', 'noarch')]
    supported2 = [('cp27', 'noabi', 'linux_i686'), ('py27', 'noabi', 'noarch'),
                  ('cp26', 'noabi', 'linux_i686'), ('py26', 'noabi', 'noarch')]
    supported3 = [('cp26', 'noabi', 'linux_i686'), ('py26', 'noabi', 'noarch'),
                  ('cp27', 'noabi', 'linux_i686'), ('py27', 'noabi', 'noarch')]

    for supp in (supported, supported2, supported3):
        context = lambda: list(supp)
        for wheel in cand_wheels:
            wheel.context = context
        best = max(cand_wheels)
        assert list(best.tags)[0] == supp[0]

    # assert_equal(
    #     list(map(get_tags, pick_best(cand_wheels, supp, top=False))), supp)


@@ -0,0 +1,55 @@
# Test wheel.
# The file has the following contents:
#   hello.pyd
#   hello/hello.py
#   hello/__init__.py
#   test-1.0.data/data/hello.dat
#   test-1.0.data/headers/hello.dat
#   test-1.0.data/scripts/hello.sh
#   test-1.0.dist-info/WHEEL
#   test-1.0.dist-info/METADATA
#   test-1.0.dist-info/RECORD
# The root is PLATLIB
# So, some in PLATLIB, and one in each of DATA, HEADERS and SCRIPTS.
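# For orientation, test_install() below asserts this mapping from archive
# entries to install scheme keys (the <...> prefixes are placeholders for
# whatever override directories are passed in, not real paths):
#   hello.pyd, hello/*, test-1.0.dist-info/*  -> <platlib>
#   test-1.0.data/data/hello.dat              -> <data>/hello.dat
#   test-1.0.data/headers/hello.dat           -> <headers>/hello.dat
#   test-1.0.data/scripts/hello.sh            -> <scripts>/hello.sh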
import wheel.tool
import wheel.pep425tags
from wheel.install import WheelFile
from tempfile import mkdtemp
import shutil
import os

THISDIR = os.path.dirname(__file__)
TESTWHEEL = os.path.join(THISDIR, 'test-1.0-py2.py3-none-win32.whl')

def check(*path):
    return os.path.exists(os.path.join(*path))

def test_install():
    tempdir = mkdtemp()
    def get_supported():
        return list(wheel.pep425tags.get_supported()) + [('py3', 'none', 'win32')]
    whl = WheelFile(TESTWHEEL, context=get_supported)
    assert whl.supports_current_python(get_supported)
    try:
        locs = {}
        for key in ('purelib', 'platlib', 'scripts', 'headers', 'data'):
            locs[key] = os.path.join(tempdir, key)
            os.mkdir(locs[key])
        whl.install(overrides=locs)
        assert len(os.listdir(locs['purelib'])) == 0
        assert check(locs['platlib'], 'hello.pyd')
        assert check(locs['platlib'], 'hello', 'hello.py')
        assert check(locs['platlib'], 'hello', '__init__.py')
        assert check(locs['data'], 'hello.dat')
        assert check(locs['headers'], 'hello.dat')
        assert check(locs['scripts'], 'hello.sh')
        assert check(locs['platlib'], 'test-1.0.dist-info', 'RECORD')
    finally:
        shutil.rmtree(tempdir)

def test_install_tool():
    """Slightly improve coverage of wheel.install"""
    wheel.tool.install([TESTWHEEL], force=True, dry_run=True)


@@ -0,0 +1,98 @@
import tempfile
import os.path
import unittest
import json

from wheel.signatures import keys

wheel_json = """
{
    "verifiers": [
        {
            "scope": "+",
            "vk": "bp-bjK2fFgtA-8DhKKAAPm9-eAZcX_u03oBv2RlKOBc"
        },
        {
            "scope": "+",
            "vk": "KAHZBfyqFW3OcFDbLSG4nPCjXxUPy72phP9I4Rn9MAo"
        },
        {
            "scope": "+",
            "vk": "tmAYCrSfj8gtJ10v3VkvW7jOndKmQIYE12hgnFu3cvk"
        }
    ],
    "signers": [
        {
            "scope": "+",
            "vk": "tmAYCrSfj8gtJ10v3VkvW7jOndKmQIYE12hgnFu3cvk"
        },
        {
            "scope": "+",
            "vk": "KAHZBfyqFW3OcFDbLSG4nPCjXxUPy72phP9I4Rn9MAo"
        }
    ],
    "schema": 1
}
"""

class TestWheelKeys(unittest.TestCase):
    def setUp(self):
        self.config = tempfile.NamedTemporaryFile(suffix='.json')
        self.config.close()

        self.config_path, self.config_filename = os.path.split(self.config.name)

        def load(*args):
            return [self.config_path]

        def save(*args):
            return self.config_path

        keys.load_config_paths = load
        keys.save_config_path = save
        self.wk = keys.WheelKeys()
        self.wk.CONFIG_NAME = self.config_filename

    def tearDown(self):
        os.unlink(self.config.name)

    def test_load_save(self):
        self.wk.data = json.loads(wheel_json)

        self.wk.add_signer('+', '67890')
        self.wk.add_signer('scope', 'abcdefg')

        self.wk.trust('epocs', 'gfedcba')
        self.wk.trust('+', '12345')

        self.wk.save()

        del self.wk.data
        self.wk.load()

        signers = self.wk.signers('scope')
        self.assertTrue(signers[0] == ('scope', 'abcdefg'), self.wk.data['signers'])
        self.assertTrue(signers[1][0] == '+', self.wk.data['signers'])

        trusted = self.wk.trusted('epocs')
        self.assertTrue(trusted[0] == ('epocs', 'gfedcba'))
        self.assertTrue(trusted[1][0] == '+')

        self.wk.untrust('epocs', 'gfedcba')
        trusted = self.wk.trusted('epocs')
        self.assertTrue(('epocs', 'gfedcba') not in trusted)

    def test_load_save_incomplete(self):
        self.wk.data = json.loads(wheel_json)
        del self.wk.data['signers']
        self.wk.data['schema'] = self.wk.SCHEMA+1
        self.wk.save()
        try:
            self.wk.load()
        except ValueError:
            pass
        else:
            raise Exception("Expected ValueError")

        del self.wk.data['schema']
        self.wk.save()
        self.wk.load()


@@ -0,0 +1,6 @@
import wheel.paths

from distutils.command.install import SCHEME_KEYS

def test_path():
    d = wheel.paths.get_install_paths('wheel')
    assert len(d) == len(SCHEME_KEYS)


@@ -0,0 +1,43 @@
import unittest

from wheel.pep425tags import get_supported
from wheel.install import WheelFile

WHEELPAT = "%(name)s-%(ver)s-%(pyver)s-%(abi)s-%(arch)s.whl"

def make_wheel(name, ver, pyver, abi, arch):
    name = WHEELPAT % dict(name=name, ver=ver, pyver=pyver, abi=abi,
                           arch=arch)
    return WheelFile(name)

# This relies on the fact that generate_supported will always return the
# exact pyver, abi, and architecture for its first (best) match.
sup = get_supported()
pyver, abi, arch = sup[0]

genver = 'py' + pyver[2:]
majver = genver[:3]

COMBINATIONS = (
    ('bar', '0.9', 'py2.py3', 'none', 'any'),
    ('bar', '0.9', majver, 'none', 'any'),
    ('bar', '0.9', genver, 'none', 'any'),
    ('bar', '0.9', pyver, abi, arch),
    ('bar', '1.3.2', majver, 'none', 'any'),
    ('bar', '3.1', genver, 'none', 'any'),
    ('bar', '3.1', pyver, abi, arch),
    ('foo', '1.0', majver, 'none', 'any'),
    ('foo', '1.1', pyver, abi, arch),
    ('foo', '2.1', majver + '0', 'none', 'any'),
    # This will not be compatible for Python x.0. Beware when we hit Python
    # 4.0, and don't test with 3.0!!!
    ('foo', '2.1', majver + '1', 'none', 'any'),
    ('foo', '2.1', pyver, 'none', 'any'),
    ('foo', '2.1', pyver, abi, arch),
)

WHEELS = [make_wheel(*args) for args in COMBINATIONS]

class TestRanking(unittest.TestCase):
    def test_comparison(self):
        for i in range(len(WHEELS)-1):
            for j in range(i):
                self.assertTrue(WHEELS[j] < WHEELS[i])


@@ -0,0 +1,47 @@
from wheel import signatures
from wheel.signatures import djbec, ed25519py
from wheel.util import binary

def test_getlib():
    signatures.get_ed25519ll()

def test_djbec():
    djbec.dsa_test()
    djbec.dh_test()

def test_ed25519py():
    kp0 = ed25519py.crypto_sign_keypair(binary(' '*32))
    kp = ed25519py.crypto_sign_keypair()

    signed = ed25519py.crypto_sign(binary('test'), kp.sk)

    ed25519py.crypto_sign_open(signed, kp.vk)

    try:
        ed25519py.crypto_sign_open(signed, kp0.vk)
    except ValueError:
        pass
    else:
        raise Exception("Expected ValueError")

    try:
        ed25519py.crypto_sign_keypair(binary(' '*33))
    except ValueError:
        pass
    else:
        raise Exception("Expected ValueError")

    try:
        ed25519py.crypto_sign(binary(''), binary(' ')*31)
    except ValueError:
        pass
    else:
        raise Exception("Expected ValueError")

    try:
        ed25519py.crypto_sign_open(binary(''), binary(' ')*31)
    except ValueError:
        pass
    else:
        raise Exception("Expected ValueError")


@@ -0,0 +1,176 @@
"""
Tests for the bdist_wheel tag options (--python-tag, --universal, and
--plat-name)
"""

import sys
import shutil
import pytest
import py.path
import tempfile
import subprocess

SETUP_PY = """\
from setuptools import setup, Extension

setup(
    name="Test",
    version="1.0",
    author_email="author@example.com",
    py_modules=["test"],
    {ext_modules}
)
"""

EXT_MODULES = "ext_modules=[Extension('_test', sources=['test.c'])],"

@pytest.fixture
def temp_pkg(request, ext=False):
    tempdir = tempfile.mkdtemp()
    def fin():
        shutil.rmtree(tempdir)
    request.addfinalizer(fin)
    temppath = py.path.local(tempdir)
    temppath.join('test.py').write('print("Hello, world")')
    if ext:
        temppath.join('test.c').write('#include <stdio.h>')
        setup_py = SETUP_PY.format(ext_modules=EXT_MODULES)
    else:
        setup_py = SETUP_PY.format(ext_modules='')
    temppath.join('setup.py').write(setup_py)
    return temppath

@pytest.fixture
def temp_ext_pkg(request):
    return temp_pkg(request, ext=True)

def test_default_tag(temp_pkg):
    subprocess.check_call([sys.executable, 'setup.py', 'bdist_wheel'],
                          cwd=str(temp_pkg))
    dist_dir = temp_pkg.join('dist')
    assert dist_dir.check(dir=1)
    wheels = dist_dir.listdir()
    assert len(wheels) == 1
    assert wheels[0].basename == 'Test-1.0-py%s-none-any.whl' % (sys.version[0],)
    assert wheels[0].ext == '.whl'

def test_explicit_tag(temp_pkg):
    subprocess.check_call(
        [sys.executable, 'setup.py', 'bdist_wheel', '--python-tag=py32'],
        cwd=str(temp_pkg))
    dist_dir = temp_pkg.join('dist')
    assert dist_dir.check(dir=1)
    wheels = dist_dir.listdir()
    assert len(wheels) == 1
    assert wheels[0].basename.startswith('Test-1.0-py32-')
    assert wheels[0].ext == '.whl'

def test_universal_tag(temp_pkg):
    subprocess.check_call(
        [sys.executable, 'setup.py', 'bdist_wheel', '--universal'],
        cwd=str(temp_pkg))
    dist_dir = temp_pkg.join('dist')
    assert dist_dir.check(dir=1)
    wheels = dist_dir.listdir()
    assert len(wheels) == 1
    assert wheels[0].basename.startswith('Test-1.0-py2.py3-')
    assert wheels[0].ext == '.whl'

def test_universal_beats_explicit_tag(temp_pkg):
    subprocess.check_call(
        [sys.executable, 'setup.py', 'bdist_wheel', '--universal', '--python-tag=py32'],
        cwd=str(temp_pkg))
    dist_dir = temp_pkg.join('dist')
    assert dist_dir.check(dir=1)
    wheels = dist_dir.listdir()
    assert len(wheels) == 1
    assert wheels[0].basename.startswith('Test-1.0-py2.py3-')
    assert wheels[0].ext == '.whl'

def test_universal_in_setup_cfg(temp_pkg):
    temp_pkg.join('setup.cfg').write('[bdist_wheel]\nuniversal=1')
    subprocess.check_call(
        [sys.executable, 'setup.py', 'bdist_wheel'],
        cwd=str(temp_pkg))
    dist_dir = temp_pkg.join('dist')
    assert dist_dir.check(dir=1)
    wheels = dist_dir.listdir()
    assert len(wheels) == 1
    assert wheels[0].basename.startswith('Test-1.0-py2.py3-')
    assert wheels[0].ext == '.whl'

def test_pythontag_in_setup_cfg(temp_pkg):
    temp_pkg.join('setup.cfg').write('[bdist_wheel]\npython_tag=py32')
    subprocess.check_call(
        [sys.executable, 'setup.py', 'bdist_wheel'],
        cwd=str(temp_pkg))
    dist_dir = temp_pkg.join('dist')
    assert dist_dir.check(dir=1)
    wheels = dist_dir.listdir()
    assert len(wheels) == 1
    assert wheels[0].basename.startswith('Test-1.0-py32-')
    assert wheels[0].ext == '.whl'

def test_legacy_wheel_section_in_setup_cfg(temp_pkg):
    temp_pkg.join('setup.cfg').write('[wheel]\nuniversal=1')
    subprocess.check_call(
        [sys.executable, 'setup.py', 'bdist_wheel'],
        cwd=str(temp_pkg))
    dist_dir = temp_pkg.join('dist')
    assert dist_dir.check(dir=1)
    wheels = dist_dir.listdir()
    assert len(wheels) == 1
    assert wheels[0].basename.startswith('Test-1.0-py2.py3-')
    assert wheels[0].ext == '.whl'

def test_plat_name_purepy(temp_pkg):
    subprocess.check_call(
        [sys.executable, 'setup.py', 'bdist_wheel', '--plat-name=testplat.pure'],
        cwd=str(temp_pkg))
    dist_dir = temp_pkg.join('dist')
    assert dist_dir.check(dir=1)
    wheels = dist_dir.listdir()
    assert len(wheels) == 1
    assert wheels[0].basename.endswith('-testplat_pure.whl')
    assert wheels[0].ext == '.whl'

def test_plat_name_ext(temp_ext_pkg):
    try:
        subprocess.check_call(
            [sys.executable, 'setup.py', 'bdist_wheel', '--plat-name=testplat.arch'],
            cwd=str(temp_ext_pkg))
    except subprocess.CalledProcessError:
        pytest.skip("Cannot compile C Extensions")
    dist_dir = temp_ext_pkg.join('dist')
    assert dist_dir.check(dir=1)
    wheels = dist_dir.listdir()
    assert len(wheels) == 1
    assert wheels[0].basename.endswith('-testplat_arch.whl')
    assert wheels[0].ext == '.whl'

def test_plat_name_purepy_in_setupcfg(temp_pkg):
    temp_pkg.join('setup.cfg').write('[bdist_wheel]\nplat_name=testplat.pure')
    subprocess.check_call(
        [sys.executable, 'setup.py', 'bdist_wheel'],
        cwd=str(temp_pkg))
    dist_dir = temp_pkg.join('dist')
    assert dist_dir.check(dir=1)
    wheels = dist_dir.listdir()
    assert len(wheels) == 1
    assert wheels[0].basename.endswith('-testplat_pure.whl')
    assert wheels[0].ext == '.whl'

def test_plat_name_ext_in_setupcfg(temp_ext_pkg):
    temp_ext_pkg.join('setup.cfg').write('[bdist_wheel]\nplat_name=testplat.arch')
    try:
        subprocess.check_call(
            [sys.executable, 'setup.py', 'bdist_wheel'],
            cwd=str(temp_ext_pkg))
    except subprocess.CalledProcessError:
        pytest.skip("Cannot compile C Extensions")
    dist_dir = temp_ext_pkg.join('dist')
    assert dist_dir.check(dir=1)
    wheels = dist_dir.listdir()
    assert len(wheels) == 1
    assert wheels[0].basename.endswith('-testplat_arch.whl')
    assert wheels[0].ext == '.whl'


@@ -0,0 +1,25 @@
from .. import tool

def test_keygen():
    def get_keyring():
        WheelKeys, keyring = tool.get_keyring()

        class WheelKeysTest(WheelKeys):
            def save(self):
                pass

        class keyringTest:
            @classmethod
            def get_keyring(cls):
                class keyringTest2:
                    pw = None
                    def set_password(self, a, b, c):
                        self.pw = c
                    def get_password(self, a, b):
                        return self.pw
                return keyringTest2()

        return WheelKeysTest, keyringTest

    tool.keygen(get_keyring=get_keyring)


@@ -0,0 +1,142 @@
import os
import wheel.install
import wheel.archive
import hashlib
try:
    from StringIO import StringIO
except ImportError:
    from io import BytesIO as StringIO
import codecs
import zipfile
import pytest
import shutil
import tempfile
from contextlib import contextmanager

@contextmanager
def environ(key, value):
    old_value = os.environ.get(key)
    try:
        os.environ[key] = value
        yield
    finally:
        if old_value is None:
            del os.environ[key]
        else:
            os.environ[key] = old_value

@contextmanager
def temporary_directory():
    # tempfile.TemporaryDirectory doesn't exist in Python 2.
    tempdir = tempfile.mkdtemp()
    try:
        yield tempdir
    finally:
        shutil.rmtree(tempdir)

@contextmanager
def readable_zipfile(path):
    # zipfile.ZipFile() isn't a context manager under Python 2.
    zf = zipfile.ZipFile(path, 'r')
    try:
        yield zf
    finally:
        zf.close()

def test_verifying_zipfile():
    if not hasattr(zipfile.ZipExtFile, '_update_crc'):
        pytest.skip('No ZIP verification. Missing ZipExtFile._update_crc.')

    sio = StringIO()
    zf = zipfile.ZipFile(sio, 'w')
    zf.writestr("one", b"first file")
    zf.writestr("two", b"second file")
    zf.writestr("three", b"third file")
    zf.close()

    # In default mode, VerifyingZipFile checks the hash of any read file
    # mentioned with set_expected_hash(). Files not mentioned with
    # set_expected_hash() are not checked.
    vzf = wheel.install.VerifyingZipFile(sio, 'r')
    vzf.set_expected_hash("one", hashlib.sha256(b"first file").digest())
    vzf.set_expected_hash("three", "blurble")
    vzf.open("one").read()
    vzf.open("two").read()
    try:
        vzf.open("three").read()
    except wheel.install.BadWheelFile:
        pass
    else:
        raise Exception("expected exception 'BadWheelFile()'")

    # In strict mode, VerifyingZipFile requires every read file to be
    # mentioned with set_expected_hash().
    vzf.strict = True
    try:
        vzf.open("two").read()
    except wheel.install.BadWheelFile:
        pass
    else:
        raise Exception("expected exception 'BadWheelFile()'")

    vzf.set_expected_hash("two", None)
    vzf.open("two").read()

def test_pop_zipfile():
    sio = StringIO()
    zf = wheel.install.VerifyingZipFile(sio, 'w')
    zf.writestr("one", b"first file")
    zf.writestr("two", b"second file")
    zf.close()

    try:
        zf.pop()
    except RuntimeError:
        pass  # already closed
    else:
        raise Exception("expected RuntimeError")

    zf = wheel.install.VerifyingZipFile(sio, 'a')
    zf.pop()
    zf.close()

    zf = wheel.install.VerifyingZipFile(sio, 'r')
    assert len(zf.infolist()) == 1
def test_zipfile_timestamp():
    # An environment variable (SOURCE_DATE_EPOCH) can be used to influence the
    # timestamp on TarInfo objects inside the zip. See issue #143. The
    # temporary_directory() helper above is used because
    # tempfile.TemporaryDirectory is not available under Python 2.
    with temporary_directory() as tempdir:
        for filename in ('one', 'two', 'three'):
            path = os.path.join(tempdir, filename)
            with codecs.open(path, 'w', encoding='utf-8') as fp:
                fp.write(filename + '\n')
        zip_base_name = os.path.join(tempdir, 'dummy')
        # The earliest date representable in TarInfos, 1980-01-01
        with environ('SOURCE_DATE_EPOCH', '315576060'):
            zip_filename = wheel.archive.make_wheelfile_inner(
                zip_base_name, tempdir)
        with readable_zipfile(zip_filename) as zf:
            for info in zf.infolist():
                assert info.date_time[:3] == (1980, 1, 1)

def test_zipfile_attributes():
    # With the change from ZipFile.write() to .writestr(), we need to manually
    # set member attributes.
    with temporary_directory() as tempdir:
        files = (('foo', 0o644), ('bar', 0o755))
        for filename, mode in files:
            path = os.path.join(tempdir, filename)
            with codecs.open(path, 'w', encoding='utf-8') as fp:
                fp.write(filename + '\n')
            os.chmod(path, mode)
        zip_base_name = os.path.join(tempdir, 'dummy')
        zip_filename = wheel.archive.make_wheelfile_inner(
            zip_base_name, tempdir)
        with readable_zipfile(zip_filename) as zf:
            for filename, mode in files:
                info = zf.getinfo(os.path.join(tempdir, filename))
                assert info.external_attr == (mode | 0o100000) << 16
                assert info.compress_type == zipfile.ZIP_DEFLATED