mirror of
https://github.com/bvanroll/college-python-image.git
synced 2025-08-31 04:52:51 +00:00
first commit
This commit is contained in:
2
projecten1/lib/python3.6/site-packages/wheel/__init__.py
Normal file
2
projecten1/lib/python3.6/site-packages/wheel/__init__.py
Normal file
@@ -0,0 +1,2 @@
|
||||
# __variables__ with double-quoted values will be available in setup.py:
# (presumably setup.py parses this file textually for these values — confirm)
__version__ = "0.31.1"
|
19
projecten1/lib/python3.6/site-packages/wheel/__main__.py
Normal file
19
projecten1/lib/python3.6/site-packages/wheel/__main__.py
Normal file
@@ -0,0 +1,19 @@
|
||||
"""
|
||||
Wheel command line tool (enable python -m wheel syntax)
|
||||
"""
|
||||
|
||||
import sys
|
||||
|
||||
|
||||
def main(): # needed for console script
|
||||
if __package__ == '':
|
||||
# To be able to run 'python wheel-0.9.whl/wheel':
|
||||
import os.path
|
||||
path = os.path.dirname(os.path.dirname(__file__))
|
||||
sys.path[0:0] = [path]
|
||||
import wheel.tool
|
||||
sys.exit(wheel.tool.main())
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
sys.exit(main())
|
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
77
projecten1/lib/python3.6/site-packages/wheel/archive.py
Normal file
77
projecten1/lib/python3.6/site-packages/wheel/archive.py
Normal file
@@ -0,0 +1,77 @@
|
||||
"""
|
||||
Archive tools for wheel.
|
||||
"""
|
||||
|
||||
import os
|
||||
import os.path
|
||||
import time
|
||||
import zipfile
|
||||
from distutils import log
|
||||
|
||||
|
||||
def archive_wheelfile(base_name, base_dir):
    """Create a ``.whl`` archive named after `base_name` from everything
    under `base_dir`.

    The working directory is switched to `base_dir` so archive members get
    paths relative to it, and is always restored afterwards.
    """
    # Resolve both paths before changing directory: `base_name` may be
    # relative to the *original* working directory.
    previous_cwd = os.path.abspath(os.curdir)
    absolute_base = os.path.abspath(base_name)
    try:
        os.chdir(base_dir)
        return make_wheelfile_inner(absolute_base)
    finally:
        os.chdir(previous_cwd)
|
||||
|
||||
|
||||
def make_wheelfile_inner(base_name, base_dir='.'):
    """Create a whl file from all the files under 'base_dir'.

    Places .dist-info at the end of the archive, with WHEEL, METADATA and
    RECORD last (in that order) so installers can read metadata first when
    streaming.

    :param base_name: archive path without the ``.whl`` extension
    :param base_dir: directory whose contents are archived (paths inside
        the zip are relative to the current working directory)
    :return: the full ``.whl`` filename that was written
    """

    zip_filename = base_name + ".whl"

    log.info("creating '%s' and adding '%s' to it", zip_filename, base_dir)

    # Some applications need reproducible .whl files, but they can't do this
    # without forcing the timestamp of the individual ZipInfo objects. See
    # issue #143.
    timestamp = os.environ.get('SOURCE_DATE_EPOCH')
    if timestamp is None:
        date_time = None
    else:
        date_time = time.gmtime(int(timestamp))[0:6]

    # Ordering weights for the metadata files deferred to the archive end;
    # anything else in .dist-info sorts first with weight 0.
    score = {'WHEEL': 1, 'METADATA': 2, 'RECORD': 3}

    def writefile(path, date_time):
        """Add one file to the archive, preserving its mode bits."""
        st = os.stat(path)
        if date_time is None:
            # No SOURCE_DATE_EPOCH: fall back to the file's own mtime.
            mtime = time.gmtime(st.st_mtime)
            date_time = mtime[0:6]
        zinfo = zipfile.ZipInfo(path, date_time)
        # Unix permission bits live in the high 16 bits of external_attr.
        zinfo.external_attr = st.st_mode << 16
        zinfo.compress_type = zipfile.ZIP_DEFLATED
        with open(path, 'rb') as fp:
            # was `zip`, renamed to `zf` to stop shadowing the builtin
            zf.writestr(zinfo, fp.read())
        log.info("adding '%s'" % path)

    with zipfile.ZipFile(zip_filename, "w", compression=zipfile.ZIP_DEFLATED,
                         allowZip64=True) as zf:
        deferred = []
        for dirpath, dirnames, filenames in os.walk(base_dir):
            # Sort the directory names so that `os.walk` will walk them in a
            # defined order on the next iteration.
            dirnames.sort()
            for name in sorted(filenames):
                path = os.path.normpath(os.path.join(dirpath, name))

                if os.path.isfile(path):
                    if dirpath.endswith('.dist-info'):
                        # Metadata is written last, ordered by `score`.
                        deferred.append((score.get(name, 0), path))
                    else:
                        writefile(path, date_time)

        deferred.sort()
        # `_rank` (not `score`) so the loop doesn't clobber the score dict.
        for _rank, path in deferred:
            writefile(path, date_time)

    return zip_filename
409
projecten1/lib/python3.6/site-packages/wheel/bdist_wheel.py
Normal file
409
projecten1/lib/python3.6/site-packages/wheel/bdist_wheel.py
Normal file
@@ -0,0 +1,409 @@
|
||||
"""
|
||||
Create a wheel (.whl) distribution.
|
||||
|
||||
A wheel is a built archive format.
|
||||
"""
|
||||
|
||||
import csv
|
||||
import hashlib
|
||||
import os
|
||||
import subprocess
|
||||
import shutil
|
||||
import sys
|
||||
import re
|
||||
from email.generator import Generator
|
||||
from distutils.core import Command
|
||||
from distutils.sysconfig import get_python_version
|
||||
from distutils import log as logger
|
||||
from shutil import rmtree
|
||||
|
||||
import pkg_resources
|
||||
|
||||
from .pep425tags import get_abbr_impl, get_impl_ver, get_abi_tag, get_platform
|
||||
from .util import native, open_for_csv
|
||||
from .archive import archive_wheelfile
|
||||
from .pkginfo import write_pkg_info
|
||||
from .metadata import pkginfo_to_metadata
|
||||
from . import pep425tags
|
||||
from . import __version__ as wheel_version
|
||||
|
||||
|
||||
# Re-export the pkg_resources sanitizers under local names.
safe_name = pkg_resources.safe_name
safe_version = pkg_resources.safe_version

# Valid values for --py-limited-api: a CPython 3.x tag such as 'cp32'..'cp39'.
# NOTE(review): \d matches a single digit, so 'cp310' matches only via the
# prefix — confirm against intended PEP 425 tags.
PY_LIMITED_API_PATTERN = r'cp3\d'
|
||||
|
||||
|
||||
def safer_name(name):
    """Sanitize a distribution name and make it filename-safe for wheels
    by turning dashes into underscores."""
    sanitized = safe_name(name)
    return sanitized.replace('-', '_')
|
||||
|
||||
|
||||
def safer_version(version):
    """Sanitize a version string and make it filename-safe for wheels
    by turning dashes into underscores."""
    sanitized = safe_version(version)
    return sanitized.replace('-', '_')
|
||||
|
||||
|
||||
class bdist_wheel(Command):
    """distutils/setuptools command that builds a wheel (.whl) archive.

    Runs a pseudo-installation into a temporary tree, converts the egg
    metadata to .dist-info, writes WHEEL and RECORD, then zips the tree.
    """

    description = 'create a wheel distribution'

    # (long option, short option, help text) triples, distutils-style.
    user_options = [('bdist-dir=', 'b',
                     "temporary directory for creating the distribution"),
                    ('plat-name=', 'p',
                     "platform name to embed in generated filenames "
                     "(default: %s)" % get_platform()),
                    ('keep-temp', 'k',
                     "keep the pseudo-installation tree around after " +
                     "creating the distribution archive"),
                    ('dist-dir=', 'd',
                     "directory to put final built distributions in"),
                    ('skip-build', None,
                     "skip rebuilding everything (for testing/debugging)"),
                    ('relative', None,
                     "build the archive using relative paths"
                     "(default: false)"),
                    ('owner=', 'u',
                     "Owner name used when creating a tar file"
                     " [default: current user]"),
                    ('group=', 'g',
                     "Group name used when creating a tar file"
                     " [default: current group]"),
                    ('universal', None,
                     "make a universal wheel"
                     " (default: false)"),
                    ('python-tag=', None,
                     "Python implementation compatibility tag"
                     " (default: py%s)" % get_impl_ver()[0]),
                    ('build-number=', None,
                     "Build number for this particular version. "
                     "As specified in PEP-0427, this must start with a digit. "
                     "[default: None]"),
                    ('py-limited-api=', None,
                     "Python tag (cp32|cp33|cpNN) for abi3 wheel tag"
                     " (default: false)"),
                    ]

    boolean_options = ['keep-temp', 'skip-build', 'relative', 'universal']

    def initialize_options(self):
        """distutils hook: set every option to its pre-parse default."""
        self.bdist_dir = None
        self.data_dir = None
        self.plat_name = None
        self.plat_tag = None
        self.format = 'zip'
        self.keep_temp = False
        self.dist_dir = None
        self.distinfo_dir = None
        self.egginfo_dir = None
        self.root_is_pure = None
        self.skip_build = None
        self.relative = False
        self.owner = None
        self.group = None
        self.universal = False
        self.python_tag = 'py' + get_impl_ver()[0]
        self.build_number = None
        self.py_limited_api = False
        self.plat_name_supplied = False

    def finalize_options(self):
        """distutils hook: derive unset options and validate the rest."""
        if self.bdist_dir is None:
            bdist_base = self.get_finalized_command('bdist').bdist_base
            self.bdist_dir = os.path.join(bdist_base, 'wheel')

        self.data_dir = self.wheel_dist_name + '.data'
        # Remember whether --plat-name came from the user before 'bdist'
        # fills it in below (get_tag() needs the distinction).
        self.plat_name_supplied = self.plat_name is not None

        need_options = ('dist_dir', 'plat_name', 'skip_build')

        self.set_undefined_options('bdist',
                                   *zip(need_options, need_options))

        # Pure unless the distribution compiles extensions or C libraries.
        self.root_is_pure = not (self.distribution.has_ext_modules()
                                 or self.distribution.has_c_libraries())

        if self.py_limited_api and not re.match(PY_LIMITED_API_PATTERN, self.py_limited_api):
            raise ValueError("py-limited-api must match '%s'" % PY_LIMITED_API_PATTERN)

        # Support legacy [wheel] section for setting universal
        wheel = self.distribution.get_option_dict('wheel')
        if 'universal' in wheel:
            # please don't define this in your global configs
            val = wheel['universal'][1].strip()
            if val.lower() in ('1', 'true', 'yes'):
                self.universal = True

        if self.build_number is not None and not self.build_number[:1].isdigit():
            raise ValueError("Build tag (build-number) must start with a digit.")

    @property
    def wheel_dist_name(self):
        """Return distribution full name with - replaced with _"""
        components = (safer_name(self.distribution.get_name()),
                      safer_version(self.distribution.get_version()))
        if self.build_number:
            components += (self.build_number,)
        return '-'.join(components)

    def get_tag(self):
        """Return the (impl, abi, platform) compatibility tag triple."""
        # bdist sets self.plat_name if unset, we should only use it for purepy
        # wheels if the user supplied it.
        if self.plat_name_supplied:
            plat_name = self.plat_name
        elif self.root_is_pure:
            plat_name = 'any'
        else:
            plat_name = self.plat_name or get_platform()
            # 32-bit interpreter on a 64-bit Linux: sys.maxsize is 2**31-1.
            if plat_name in ('linux-x86_64', 'linux_x86_64') and sys.maxsize == 2147483647:
                plat_name = 'linux_i686'
        plat_name = plat_name.replace('-', '_').replace('.', '_')

        if self.root_is_pure:
            if self.universal:
                impl = 'py2.py3'
            else:
                impl = self.python_tag
            tag = (impl, 'none', plat_name)
        else:
            impl_name = get_abbr_impl()
            impl_ver = get_impl_ver()
            impl = impl_name + impl_ver
            # We don't work on CPython 3.1, 3.0.
            if self.py_limited_api and (impl_name + impl_ver).startswith('cp3'):
                impl = self.py_limited_api
                abi_tag = 'abi3'
            else:
                abi_tag = str(get_abi_tag()).lower()
            tag = (impl, abi_tag, plat_name)
            supported_tags = pep425tags.get_supported(
                supplied_platform=plat_name if self.plat_name_supplied else None)
            # XXX switch to this alternate implementation for non-pure:
            if not self.py_limited_api:
                assert tag == supported_tags[0], "%s != %s" % (tag, supported_tags[0])
            assert tag in supported_tags, "would build wheel with unsupported tag {}".format(tag)
        return tag

    def get_archive_basename(self):
        """Return archive name without extension"""

        impl_tag, abi_tag, plat_tag = self.get_tag()

        archive_basename = "%s-%s-%s-%s" % (
            self.wheel_dist_name,
            impl_tag,
            abi_tag,
            plat_tag)
        return archive_basename

    def run(self):
        """Build, pseudo-install into bdist_dir, write metadata, archive."""
        build_scripts = self.reinitialize_command('build_scripts')
        build_scripts.executable = 'python'

        if not self.skip_build:
            self.run_command('build')

        install = self.reinitialize_command('install',
                                            reinit_subcommands=True)
        install.root = self.bdist_dir
        install.compile = False
        install.skip_build = self.skip_build
        install.warn_dir = False

        # A wheel without setuptools scripts is more cross-platform.
        # Use the (undocumented) `no_ep` option to setuptools'
        # install_scripts command to avoid creating entry point scripts.
        install_scripts = self.reinitialize_command('install_scripts')
        install_scripts.no_ep = True

        # Use a custom scheme for the archive, because we have to decide
        # at installation time which scheme to use.
        for key in ('headers', 'scripts', 'data', 'purelib', 'platlib'):
            setattr(install,
                    'install_' + key,
                    os.path.join(self.data_dir, key))

        basedir_observed = ''

        if os.name == 'nt':
            # win32 barfs if any of these are ''; could be '.'?
            # (distutils.command.install:change_roots bug)
            basedir_observed = os.path.normpath(os.path.join(self.data_dir, '..'))
            self.install_libbase = self.install_lib = basedir_observed

        setattr(install,
                'install_purelib' if self.root_is_pure else 'install_platlib',
                basedir_observed)

        logger.info("installing to %s", self.bdist_dir)

        self.run_command('install')

        archive_basename = self.get_archive_basename()

        pseudoinstall_root = os.path.join(self.dist_dir, archive_basename)
        if not self.relative:
            archive_root = self.bdist_dir
        else:
            archive_root = os.path.join(
                self.bdist_dir,
                self._ensure_relative(install.install_base))

        self.set_undefined_options(
            'install_egg_info', ('target', 'egginfo_dir'))
        self.distinfo_dir = os.path.join(self.bdist_dir,
                                         '%s.dist-info' % self.wheel_dist_name)
        self.egg2dist(self.egginfo_dir,
                      self.distinfo_dir)

        self.write_wheelfile(self.distinfo_dir)

        self.write_record(self.bdist_dir, self.distinfo_dir)

        # Make the archive
        if not os.path.exists(self.dist_dir):
            os.makedirs(self.dist_dir)
        wheel_name = archive_wheelfile(pseudoinstall_root, archive_root)

        # Sign the archive
        if 'WHEEL_TOOL' in os.environ:
            subprocess.call([os.environ['WHEEL_TOOL'], 'sign', wheel_name])

        # Add to 'Distribution.dist_files' so that the "upload" command works
        getattr(self.distribution, 'dist_files', []).append(
            ('bdist_wheel', get_python_version(), wheel_name))

        if not self.keep_temp:
            logger.info('removing %s', self.bdist_dir)
            if not self.dry_run:
                rmtree(self.bdist_dir)

    def write_wheelfile(self, wheelfile_base, generator='bdist_wheel (' + wheel_version + ')'):
        """Write the RFC 822-style WHEEL metadata file under wheelfile_base."""
        from email.message import Message
        msg = Message()
        msg['Wheel-Version'] = '1.0'  # of the spec
        msg['Generator'] = generator
        msg['Root-Is-Purelib'] = str(self.root_is_pure).lower()
        if self.build_number is not None:
            msg['Build'] = self.build_number

        # Doesn't work for bdist_wininst
        # One Tag header per member of the tag Cartesian product.
        impl_tag, abi_tag, plat_tag = self.get_tag()
        for impl in impl_tag.split('.'):
            for abi in abi_tag.split('.'):
                for plat in plat_tag.split('.'):
                    msg['Tag'] = '-'.join((impl, abi, plat))

        wheelfile_path = os.path.join(wheelfile_base, 'WHEEL')
        logger.info('creating %s', wheelfile_path)
        with open(wheelfile_path, 'w') as f:
            Generator(f, maxheaderlen=0).flatten(msg)

    def _ensure_relative(self, path):
        """Strip a leading separator (and keep any drive) from *path*."""
        # copied from dir_util, deleted
        drive, path = os.path.splitdrive(path)
        if path[0:1] == os.sep:
            path = drive + path[1:]
        return path

    def license_file(self):
        """Return license filename from a license-file key in setup.cfg, or None."""
        metadata = self.distribution.get_option_dict('metadata')
        if 'license_file' not in metadata:
            return None
        return metadata['license_file'][1]

    def egg2dist(self, egginfo_path, distinfo_path):
        """Convert an .egg-info directory into a .dist-info directory"""
        def adios(p):
            """Appropriately delete directory, file or link."""
            if os.path.exists(p) and not os.path.islink(p) and os.path.isdir(p):
                shutil.rmtree(p)
            elif os.path.exists(p):
                os.unlink(p)

        adios(distinfo_path)

        if not os.path.exists(egginfo_path):
            # There is no egg-info. This is probably because the egg-info
            # file/directory is not named matching the distribution name used
            # to name the archive file. Check for this case and report
            # accordingly.
            import glob
            pat = os.path.join(os.path.dirname(egginfo_path), '*.egg-info')
            possible = glob.glob(pat)
            err = "Egg metadata expected at %s but not found" % (egginfo_path,)
            if possible:
                alt = os.path.basename(possible[0])
                err += " (%s found - possible misnamed archive file?)" % (alt,)

            raise ValueError(err)

        if os.path.isfile(egginfo_path):
            # .egg-info is a single file
            pkginfo_path = egginfo_path
            pkg_info = pkginfo_to_metadata(egginfo_path, egginfo_path)
            os.mkdir(distinfo_path)
        else:
            # .egg-info is a directory
            pkginfo_path = os.path.join(egginfo_path, 'PKG-INFO')
            pkg_info = pkginfo_to_metadata(egginfo_path, pkginfo_path)

            # ignore common egg metadata that is useless to wheel
            shutil.copytree(egginfo_path, distinfo_path,
                            ignore=lambda x, y: {'PKG-INFO', 'requires.txt', 'SOURCES.txt',
                                                 'not-zip-safe'}
                            )

            # delete dependency_links if it is only whitespace
            dependency_links_path = os.path.join(distinfo_path, 'dependency_links.txt')
            with open(dependency_links_path, 'r') as dependency_links_file:
                dependency_links = dependency_links_file.read().strip()
            if not dependency_links:
                adios(dependency_links_path)

        write_pkg_info(os.path.join(distinfo_path, 'METADATA'), pkg_info)

        # XXX heuristically copy any LICENSE/LICENSE.txt?
        license = self.license_file()
        if license:
            license_filename = 'LICENSE.txt'
            shutil.copy(license, os.path.join(distinfo_path, license_filename))

        adios(egginfo_path)

    def write_record(self, bdist_dir, distinfo_dir):
        """Write RECORD: one CSV row (path, sha256 hash, size) per file."""
        from .util import urlsafe_b64encode

        record_path = os.path.join(distinfo_dir, 'RECORD')
        record_relpath = os.path.relpath(record_path, bdist_dir)

        def walk():
            """Yield every file under bdist_dir in a deterministic order."""
            for dir, dirs, files in os.walk(bdist_dir):
                dirs.sort()
                for f in sorted(files):
                    yield os.path.join(dir, f)

        def skip(path):
            """Wheel hashes every possible file."""
            # RECORD itself gets an empty hash/size entry.
            return (path == record_relpath)

        with open_for_csv(record_path, 'w+') as record_file:
            writer = csv.writer(record_file)
            for path in walk():
                relpath = os.path.relpath(path, bdist_dir)
                if skip(relpath):
                    hash = ''
                    size = ''
                else:
                    with open(path, 'rb') as f:
                        data = f.read()
                    digest = hashlib.sha256(data).digest()
                    hash = 'sha256=' + native(urlsafe_b64encode(digest))
                    size = len(data)

                # NOTE(review): rebinds the outer `record_path` from here on;
                # harmless because the RECORD file is already open, but fragile.
                record_path = os.path.relpath(path, bdist_dir).replace(os.path.sep, '/')

                # On Python 2, re-encode the path as UTF-8 from the default file system encoding
                if isinstance(record_path, bytes):
                    record_path = record_path.decode(sys.getfilesystemencoding()).encode('utf-8')

                writer.writerow((record_path, hash, size))
97
projecten1/lib/python3.6/site-packages/wheel/egg2wheel.py
Normal file
97
projecten1/lib/python3.6/site-packages/wheel/egg2wheel.py
Normal file
@@ -0,0 +1,97 @@
|
||||
#!/usr/bin/env python
|
||||
import distutils.dist
|
||||
import os.path
|
||||
import re
|
||||
import shutil
|
||||
import sys
|
||||
import tempfile
|
||||
import zipfile
|
||||
from argparse import ArgumentParser
|
||||
from distutils.archive_util import make_archive
|
||||
from glob import iglob
|
||||
|
||||
import wheel.bdist_wheel
|
||||
from wheel.tool import WheelError
|
||||
from wheel.wininst2wheel import _bdist_wheel_tag
|
||||
|
||||
egg_info_re = re.compile(r'''
|
||||
(?P<name>.+?)-(?P<ver>.+?)
|
||||
(-(?P<pyver>py\d\.\d)
|
||||
(-(?P<arch>.+?))?
|
||||
)?.egg$''', re.VERBOSE)
|
||||
|
||||
|
||||
def egg2wheel(egg_path, dest_dir):
    """Convert one egg (file or directory) at *egg_path* into a wheel
    written into *dest_dir*.

    Raises WheelError if the egg's filename cannot be parsed.
    """
    filename = os.path.basename(egg_path)
    match = egg_info_re.match(filename)
    if not match:
        raise WheelError('Invalid egg file name: {}'.format(filename))

    egg_info = match.groupdict()
    # NOTE(review): `dir` shadows the builtin; kept as-is here.
    dir = tempfile.mkdtemp(suffix="_e2w")
    if os.path.isfile(egg_path):
        # assume we have a bdist_egg otherwise
        egg = zipfile.ZipFile(egg_path)
        egg.extractall(dir)
    else:
        # support buildout-style installed eggs directories
        for pth in os.listdir(egg_path):
            src = os.path.join(egg_path, pth)
            if os.path.isfile(src):
                shutil.copy2(src, dir)
            else:
                shutil.copytree(src, os.path.join(dir, pth))

    # 'py2.7' -> 'py27' for use in the wheel tag.
    pyver = egg_info['pyver']
    if pyver:
        pyver = pyver.replace('.', '')

    arch = (egg_info['arch'] or 'any').replace('.', '_').replace('-', '_')

    # assume all binary eggs are for CPython
    # NOTE(review): if arch != 'any' but the egg name had no pyver group,
    # pyver is None and this raises TypeError — presumably such names
    # don't occur in practice; confirm.
    abi = 'cp' + pyver[2:] if arch != 'any' else 'none'

    root_is_purelib = egg_info['arch'] is None
    if root_is_purelib:
        bw = wheel.bdist_wheel.bdist_wheel(distutils.dist.Distribution())
    else:
        bw = _bdist_wheel_tag(distutils.dist.Distribution())

    bw.root_is_pure = root_is_purelib
    bw.python_tag = pyver
    bw.plat_name_supplied = True
    bw.plat_name = egg_info['arch'] or 'any'
    if not root_is_purelib:
        bw.full_tag_supplied = True
        bw.full_tag = (pyver, abi, arch)

    dist_info_dir = os.path.join(dir, '{name}-{ver}.dist-info'.format(**egg_info))
    bw.egg2dist(os.path.join(dir, 'EGG-INFO'), dist_info_dir)
    bw.write_wheelfile(dist_info_dir, generator='egg2wheel')
    bw.write_record(dir, dist_info_dir)
    # NOTE(review): {pyver} here is the *raw* dotted value from the egg name
    # (e.g. 'py2.7'), not the normalized local `pyver` — verify intended.
    wheel_name = '{name}-{ver}-{pyver}-{}-{}'.format(abi, arch, **egg_info)
    # make_archive writes '<name>.zip'; rename to the .whl extension.
    filename = make_archive(os.path.join(dest_dir, wheel_name), 'zip', root_dir=dir)
    os.rename(filename, filename[:-3] + 'whl')
    shutil.rmtree(dir)
|
||||
|
||||
|
||||
def main():
    """Command-line entry point: convert every egg matching the given
    glob patterns into a wheel in --dest-dir."""
    arg_parser = ArgumentParser()
    arg_parser.add_argument('eggs', nargs='*', help="Eggs to convert")
    arg_parser.add_argument('--dest-dir', '-d', default=os.path.curdir,
                            help="Directory to store wheels (default %(default)s)")
    arg_parser.add_argument('--verbose', '-v', action='store_true')
    options = arg_parser.parse_args()
    for pattern in options.eggs:
        for egg_path in iglob(pattern):
            if options.verbose:
                print("{}... ".format(egg_path))
                sys.stdout.flush()

            egg2wheel(egg_path, options.dest_dir)
            if options.verbose:
                print("OK")
|
512
projecten1/lib/python3.6/site-packages/wheel/install.py
Normal file
512
projecten1/lib/python3.6/site-packages/wheel/install.py
Normal file
@@ -0,0 +1,512 @@
|
||||
"""
|
||||
Operations on existing wheel files, including basic installation.
|
||||
"""
|
||||
# XXX see patched pip to install
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import csv
|
||||
import hashlib
|
||||
import os.path
|
||||
import re
|
||||
import shutil
|
||||
import sys
|
||||
import warnings
|
||||
import zipfile
|
||||
|
||||
from . import signatures
|
||||
from .paths import get_install_paths
|
||||
from .pep425tags import get_supported
|
||||
from .pkginfo import read_pkg_info_bytes
|
||||
from .util import (
|
||||
urlsafe_b64encode, from_json, urlsafe_b64decode, native, binary, HashingFile, open_for_csv)
|
||||
|
||||
# Largest representable int: sys.maxsize on Python 3, sys.maxint on
# ancient Python 2 builds where maxsize did not exist.
try:
    _big_number = sys.maxsize
except NameError:
    _big_number = sys.maxint

# The next major version after this version of the 'wheel' tool:
VERSION_TOO_HIGH = (1, 0)

# Non-greedy matching of an optional build number may be too clever (more
# invalid wheel filenames will match). Separate regex for .dist-info?
# Bound method `.match` is exposed directly; call as WHEEL_INFO_RE(name).
WHEEL_INFO_RE = re.compile(
    r"""^(?P<namever>(?P<name>.+?)-(?P<ver>\d.*?))(-(?P<build>\d.*?))?
     -(?P<pyver>[a-z].+?)-(?P<abi>.+?)-(?P<plat>.+?)(\.whl|\.dist-info)$""",
    re.VERBOSE).match
|
||||
|
||||
|
||||
def parse_version(version):
    """Use parse_version from pkg_resources or distutils as available.

    The first call rebinds the module-level name to the imported
    implementation, so subsequent calls bypass this shim entirely.
    """
    global parse_version
    try:
        from pkg_resources import parse_version as _impl
    except ImportError:
        from distutils.version import LooseVersion as _impl
    parse_version = _impl
    return _impl(version)
|
||||
|
||||
|
||||
class reify(object):
    """Cache a method's result in the instance dict on first access.

    A non-data descriptor: the first attribute lookup calls the wrapped
    method and stores the result as an instance attribute of the same
    name, which shadows this descriptor on every later lookup.
    """

    def __init__(self, wrapped):
        self.wrapped = wrapped
        self.__doc__ = wrapped.__doc__

    def __get__(self, inst, objtype=None):
        # Class-level access returns the descriptor itself.
        if inst is None:
            return self
        result = self.wrapped(inst)
        setattr(inst, self.wrapped.__name__, result)
        return result
|
||||
|
||||
|
||||
class BadWheelFile(ValueError):
    """Raised when a wheel's filename or contents cannot be parsed or verified."""
|
||||
|
||||
|
||||
class WheelFile(object):
    """Parse wheel-specific attributes from a wheel (.whl) file and offer
    basic installation and verification support.

    WheelFile can be used to simply parse a wheel filename by avoiding the
    methods that require the actual file contents."""

    # Names of the metadata files inside the .dist-info directory.
    WHEEL_INFO = "WHEEL"
    RECORD = "RECORD"
|
||||
|
||||
    def __init__(self,
                 filename,
                 fp=None,
                 append=False,
                 context=get_supported):
        """
        :param filename: path to a .whl file; its basename must parse
            against WHEEL_INFO_RE.
        :param fp: A seekable file-like object or None to open(filename).
        :param append: Open archive in append mode.
        :param context: Function returning list of supported tags. Wheels
            must have the same context to be sortable.
        :raises BadWheelFile: if the basename is not a valid wheel name.
        """
        self.filename = filename
        self.fp = fp
        self.append = append
        self.context = context
        basename = os.path.basename(filename)
        # WHEEL_INFO_RE is the bound .match of the module-level regex;
        # the result (a match object or None) is kept for the properties.
        self.parsed_filename = WHEEL_INFO_RE(basename)
        if not basename.endswith('.whl') or self.parsed_filename is None:
            raise BadWheelFile("Bad filename '%s'" % filename)
|
||||
|
||||
    def __repr__(self):
        # The filename alone identifies the wheel.
        return self.filename

    @property
    def distinfo_name(self):
        """Name of the .dist-info directory inside the archive."""
        return "%s.dist-info" % self.parsed_filename.group('namever')

    @property
    def datadir_name(self):
        """Name of the .data directory inside the archive."""
        return "%s.data" % self.parsed_filename.group('namever')

    @property
    def record_name(self):
        """Archive-relative path of the RECORD file."""
        return "%s/%s" % (self.distinfo_name, self.RECORD)

    @property
    def wheelinfo_name(self):
        """Archive-relative path of the WHEEL metadata file."""
        return "%s/%s" % (self.distinfo_name, self.WHEEL_INFO)
|
||||
|
||||
    @property
    def tags(self):
        """A wheel file is compatible with the Cartesian product of the
        period-delimited tags in its filename.
        To choose a wheel file among several candidates having the same
        distribution version 'ver', an installer ranks each triple of
        (pyver, abi, plat) that its Python installation can run, sorting
        the wheels by the best-ranked tag it supports and then by their
        arity which is just len(list(compatibility_tags)).

        Yields (pyver, abi, plat) triples.
        """
        tags = self.parsed_filename.groupdict()
        for pyver in tags['pyver'].split('.'):
            for abi in tags['abi'].split('.'):
                for plat in tags['plat'].split('.'):
                    yield (pyver, abi, plat)

    # Alias kept for callers using the older name.
    compatibility_tags = tags
|
||||
|
||||
    @property
    def arity(self):
        """The number of compatibility tags the wheel declares."""
        return len(list(self.compatibility_tags))

    @property
    def rank(self):
        """
        Lowest index of any of this wheel's tags in self.context(), and the
        arity e.g. (0, 1)
        """
        return self.compatibility_rank(self.context())

    @property
    def compatible(self):
        """True when at least one declared tag is in the supported set."""
        # _big_number is the sentinel rank for "no tag matched".
        return self.rank[0] != _big_number  # bad API!
|
||||
|
||||
# deprecated:
|
||||
def compatibility_rank(self, supported):
|
||||
"""Rank the wheel against the supported tags. Smaller ranks are more
|
||||
compatible!
|
||||
|
||||
:param supported: A list of compatibility tags that the current
|
||||
Python implemenation can run.
|
||||
"""
|
||||
preferences = []
|
||||
for tag in self.compatibility_tags:
|
||||
try:
|
||||
preferences.append(supported.index(tag))
|
||||
# Tag not present
|
||||
except ValueError:
|
||||
pass
|
||||
if len(preferences):
|
||||
return (min(preferences), self.arity)
|
||||
return (_big_number, 0)
|
||||
|
||||
    # deprecated
    def supports_current_python(self, x):
        """Return self.compatible; *x* must be the same context function."""
        assert self.context == x, 'context mismatch'
        return self.compatible
|
||||
|
||||
    # Comparability.
    # Wheels are equal if they refer to the same file.
    # If two wheels are not equal, compare based on (in this order):
    #   1. Name
    #   2. Version
    #   3. Compatibility rank
    #   4. Filename (as a tiebreaker)
    @property
    def _sort_key(self):
        # rank components are negated so that *smaller* (better) ranks
        # sort as *larger* keys.
        return (self.parsed_filename.group('name'),
                parse_version(self.parsed_filename.group('ver')),
                tuple(-x for x in self.rank),
                self.filename)

    def __eq__(self, other):
        return self.filename == other.filename

    def __ne__(self, other):
        return self.filename != other.filename
|
||||
|
||||
def __lt__(self, other):
|
||||
if self.context != other.context:
|
||||
raise TypeError("{0}.context != {1}.context".format(self, other))
|
||||
|
||||
return self._sort_key < other._sort_key
|
||||
|
||||
# XXX prune
|
||||
|
||||
sn = self.parsed_filename.group('name')
|
||||
on = other.parsed_filename.group('name')
|
||||
if sn != on:
|
||||
return sn < on
|
||||
sv = parse_version(self.parsed_filename.group('ver'))
|
||||
ov = parse_version(other.parsed_filename.group('ver'))
|
||||
if sv != ov:
|
||||
return sv < ov
|
||||
# Compatibility
|
||||
if self.context != other.context:
|
||||
raise TypeError("{0}.context != {1}.context".format(self, other))
|
||||
sc = self.rank
|
||||
oc = other.rank
|
||||
if sc is not None and oc is not None and sc != oc:
|
||||
# Smaller compatibility ranks are "better" than larger ones,
|
||||
# so we have to reverse the sense of the comparison here!
|
||||
return sc > oc
|
||||
elif sc is None and oc is not None:
|
||||
return False
|
||||
return self.filename < other.filename
|
||||
|
||||
    # Remaining orderings are derived from __lt__ and __eq__.
    def __gt__(self, other):
        return other < self

    def __le__(self, other):
        return self == other or self < other

    def __ge__(self, other):
        return self == other or other < self
|
||||
|
||||
#
|
||||
# Methods using the file's contents:
|
||||
#
|
||||
|
||||
    @reify
    def zipfile(self):
        """Open (once, lazily) the underlying archive.

        Uses `fp` when supplied, else opens `filename`.  In append mode
        verification is skipped; otherwise the archive is verified on open.
        """
        mode = "r"
        if self.append:
            mode = "a"
        # NOTE(review): VerifyingZipFile and self.verify are defined
        # elsewhere in this module — presumably a ZipFile subclass that
        # checks RECORD hashes; confirm.
        vzf = VerifyingZipFile(self.fp if self.fp else self.filename, mode)
        if not self.append:
            self.verify(vzf)
        return vzf
|
||||
|
||||
    @reify
    def parsed_wheel_info(self):
        """Parse wheel metadata (the .data/WHEEL file)"""
        return read_pkg_info_bytes(self.zipfile.read(self.wheelinfo_name))

    def check_version(self):
        """Raise ValueError if Wheel-Version is >= VERSION_TOO_HIGH."""
        version = self.parsed_wheel_info['Wheel-Version']
        if tuple(map(int, version.split('.'))) >= VERSION_TOO_HIGH:
            raise ValueError("Wheel version is too high")

    @reify
    def install_paths(self):
        """
        Consult distutils to get the install paths for our dist. A dict with
        ('purelib', 'platlib', 'headers', 'scripts', 'data').

        We use the name from our filename as the dist name, which means headers
        could be installed in the wrong place if the filesystem-escaped name
        is different than the Name. Who cares?
        """
        name = self.parsed_filename.group('name')
        return get_install_paths(name)
|
||||
|
||||
def install(self, force=False, overrides={}):
    """
    Install the wheel into site-packages.

    :param force: when False, refuse to overwrite any existing target file.
    :param overrides: optional mapping overriding the distutils install
        path per key ('purelib', 'platlib', 'scripts', ...). The shared
        mutable default is harmless here because it is only ever read.
    """

    # Utility to get the target directory for a particular key
    def get_path(key):
        return overrides.get(key) or self.install_paths[key]

    # The base target location is either purelib or platlib
    if self.parsed_wheel_info['Root-Is-Purelib'] == 'true':
        root = get_path('purelib')
    else:
        root = get_path('platlib')

    # Parse all the names in the archive
    name_trans = {}
    for info in self.zipfile.infolist():
        name = info.filename
        # Zip files can contain entries representing directories.
        # These end in a '/'. We ignore these, as we create directories
        # on demand.
        if name.endswith('/'):
            continue

        # Pathnames in a zipfile namelist are always /-separated.
        # In theory, paths could start with ./ or have other oddities
        # but this won't happen in practical cases of well-formed wheels.
        # We'll cover the simple case of an initial './' as it's both easy
        # to do and more common than most other oddities.
        if name.startswith('./'):
            name = name[2:]

        # Split off the base directory to identify files that are to be
        # installed in non-root locations
        basedir, sep, filename = name.partition('/')
        if sep and basedir == self.datadir_name:
            # Data file. Target destination is elsewhere
            key, sep, filename = filename.partition('/')
            if not sep:
                raise ValueError("Invalid filename in wheel: {0}".format(name))
            target = get_path(key)
        else:
            # Normal file. Target destination is root
            key = ''
            target = root
            filename = name

        # Map the actual filename from the zipfile to its intended target
        # directory and the pathname relative to that directory.
        dest = os.path.normpath(os.path.join(target, filename))
        name_trans[info] = (key, target, filename, dest)

    # The install process:
    # 1. Prechecks (wheel validity, declared architecture) are the
    #    responsibility of the caller.
    # 2. Overwrite check - do any of the files to be installed already exist?
    # 3. Actual install - put the files in their target locations.
    # 4. Update RECORD - write a suitably modified RECORD file to
    #    reflect the actual installed paths.

    if not force:
        for info, v in name_trans.items():
            k = info.filename
            key, target, filename, dest = v
            if os.path.exists(dest):
                raise ValueError(
                    "Wheel file {0} would overwrite {1}. Use force if this is intended".format(
                        k, dest))

    # Get the name of our executable, for use when replacing script
    # wrapper hashbang lines. We encode it using getfilesystemencoding,
    # as that is "the name of the encoding used to convert Unicode
    # filenames into system file names".
    exename = sys.executable.encode(sys.getfilesystemencoding())
    record_data = []
    record_name = self.distinfo_name + '/RECORD'
    for info, (key, target, filename, dest) in name_trans.items():
        name = info.filename
        source = self.zipfile.open(info)
        # Skip the RECORD file; a fresh one is written below.
        if name == record_name:
            continue
        ddir = os.path.dirname(dest)
        if not os.path.isdir(ddir):
            os.makedirs(ddir)

        # Copy to a '.part' temp file first so a failed copy never leaves
        # a truncated file at the final destination.
        temp_filename = dest + '.part'
        try:
            with HashingFile(temp_filename, 'wb') as destination:
                if key == 'scripts':
                    # Rewrite the generic '#!python' hashbang to point at
                    # the installing interpreter.
                    hashbang = source.readline()
                    if hashbang.startswith(b'#!python'):
                        hashbang = b'#!' + exename + binary(os.linesep)
                    destination.write(hashbang)

                shutil.copyfileobj(source, destination)
        except BaseException:
            if os.path.exists(temp_filename):
                os.unlink(temp_filename)

            raise

        os.rename(temp_filename, dest)
        reldest = os.path.relpath(dest, root)
        # BUGFIX: str.replace returns a new string; the result was
        # previously discarded, so on Windows RECORD kept '\\' path
        # separators instead of the required '/'.
        reldest = reldest.replace(os.sep, '/')
        record_data.append((reldest, destination.digest(), destination.length))
        destination.close()
        source.close()
        # preserve attributes (especially +x bit for scripts)
        attrs = info.external_attr >> 16
        if attrs:  # tends to be 0 if Windows.
            os.chmod(dest, attrs)

    record_name = os.path.join(root, self.record_name)
    with open_for_csv(record_name, 'w+') as record_file:
        writer = csv.writer(record_file)
        for reldest, digest, length in sorted(record_data):
            writer.writerow((reldest, digest, length))
        # RECORD lists itself without a hash.
        writer.writerow((self.record_name, '', ''))
|
||||
|
||||
def verify(self, zipfile=None):
    """Configure the VerifyingZipFile `zipfile` by verifying its signature
    and setting expected hashes for every hash in RECORD.
    Caller must complete the verification process by completely reading
    every file in the archive (e.g. with extractall)."""
    sig = None
    if zipfile is None:
        zipfile = self.zipfile
    # In strict mode, every entry opened later must have a registered hash.
    zipfile.strict = True

    record_name = '/'.join((self.distinfo_name, 'RECORD'))
    sig_name = '/'.join((self.distinfo_name, 'RECORD.jws'))
    # tolerate s/mime signatures:
    smime_sig_name = '/'.join((self.distinfo_name, 'RECORD.p7s'))
    # RECORD and the signature files cannot list their own hashes;
    # register them as "don't care" so strict mode still allows them.
    zipfile.set_expected_hash(record_name, None)
    zipfile.set_expected_hash(sig_name, None)
    zipfile.set_expected_hash(smime_sig_name, None)
    record = zipfile.read(record_name)

    record_digest = urlsafe_b64encode(hashlib.sha256(record).digest())
    try:
        sig = from_json(native(zipfile.read(sig_name)))
    except KeyError:  # no signature
        pass
    if sig:
        # The JWS payload must claim exactly the RECORD digest we computed.
        headers, payload = signatures.verify(sig)
        if payload['hash'] != "sha256=" + native(record_digest):
            msg = "RECORD.jws claimed RECORD hash {} != computed hash {}."
            raise BadWheelFile(msg.format(payload['hash'],
                                          native(record_digest)))

    # RECORD rows are: filename, "algo=base64digest", size.
    reader = csv.reader((native(r, 'utf-8') for r in record.splitlines()))

    for row in reader:
        filename = row[0]
        hash = row[1]
        if not hash:
            # Only RECORD and its signature may legitimately lack a hash.
            if filename not in (record_name, sig_name):
                print("%s has no hash!" % filename, file=sys.stderr)
            continue

        algo, data = row[1].split('=', 1)
        assert algo == "sha256", "Unsupported hash algorithm"
        zipfile.set_expected_hash(filename, urlsafe_b64decode(binary(data)))
|
||||
|
||||
|
||||
class VerifyingZipFile(zipfile.ZipFile):
    """ZipFile that can assert that each of its extracted contents matches
    an expected sha256 hash. Note that each file must be completely read in
    order for its hash to be checked."""

    def __init__(self, file, mode="r",
                 compression=zipfile.ZIP_STORED,
                 allowZip64=True):
        super(VerifyingZipFile, self).__init__(file, mode, compression, allowZip64)

        # When True, opening an entry with no registered hash is an error.
        self.strict = False
        # Maps entry name -> expected digest bytes (None means "don't care").
        self._expected_hashes = {}
        self._hash_algorithm = hashlib.sha256

    def set_expected_hash(self, name, hash):
        """
        :param name: name of zip entry
        :param hash: bytes of hash (or None for "don't care")
        """
        self._expected_hashes[name] = hash

    def open(self, name_or_info, mode="r", pwd=None):
        """Return file-like object for 'name'.

        The returned stream hashes bytes as they are read and raises
        BadWheelFile at EOF if the digest does not match the one
        registered via set_expected_hash().
        """
        # A non-monkey-patched version would contain most of zipfile.py
        ef = super(VerifyingZipFile, self).open(name_or_info, mode, pwd)
        if isinstance(name_or_info, zipfile.ZipInfo):
            name = name_or_info.filename
        else:
            name = name_or_info

        if name in self._expected_hashes and self._expected_hashes[name] is not None:
            expected_hash = self._expected_hashes[name]
            try:
                _update_crc_orig = ef._update_crc
            except AttributeError:
                warnings.warn('Need ZipExtFile._update_crc to implement '
                              'file hash verification (in Python >= 2.7)')
                return ef
            running_hash = self._hash_algorithm()
            # Piggy-back on the internal CRC callback so every byte read is
            # also hashed; the callback signature changed in Python 3.3.
            if hasattr(ef, '_eof'):  # py33
                def _update_crc(data):
                    _update_crc_orig(data)
                    running_hash.update(data)
                    if ef._eof and running_hash.digest() != expected_hash:
                        raise BadWheelFile("Bad hash for file %r" % ef.name)
            else:
                def _update_crc(data, eof=None):
                    _update_crc_orig(data, eof=eof)
                    running_hash.update(data)
                    if eof and running_hash.digest() != expected_hash:
                        raise BadWheelFile("Bad hash for file %r" % ef.name)
            ef._update_crc = _update_crc
        elif self.strict and name not in self._expected_hashes:
            raise BadWheelFile("No expected hash for file %r" % ef.name)
        return ef

    def pop(self):
        """Truncate the last file off this zipfile.
        Assumes infolist() is in the same order as the files (true for
        ordinary zip files created by Python)"""
        if not self.fp:
            raise RuntimeError(
                "Attempt to pop from ZIP archive that was already closed")
        # Drop the final entry's metadata, then truncate the underlying
        # stream at the start of that entry's local header.
        last = self.infolist().pop()
        del self.NameToInfo[last.filename]
        self.fp.seek(last.header_offset, os.SEEK_SET)
        self.fp.truncate()
        self._didModify = True
|
130
projecten1/lib/python3.6/site-packages/wheel/metadata.py
Normal file
130
projecten1/lib/python3.6/site-packages/wheel/metadata.py
Normal file
@@ -0,0 +1,130 @@
|
||||
"""
|
||||
Tools for converting old- to new-style metadata.
|
||||
"""
|
||||
|
||||
import os.path
|
||||
import re
|
||||
import textwrap
|
||||
from collections import namedtuple
|
||||
|
||||
import pkg_resources
|
||||
|
||||
from .pkginfo import read_pkg_info
|
||||
|
||||
# Wheel itself is probably the only program that uses non-extras markers
|
||||
# in METADATA/PKG-INFO. Support its syntax with the extra at the end only.
|
||||
EXTRA_RE = re.compile("""^(?P<package>.*?)(;\s*(?P<condition>.*?)(extra == '(?P<extra>.*?)')?)$""")
|
||||
|
||||
MayRequiresKey = namedtuple('MayRequiresKey', ('condition', 'extra'))
|
||||
|
||||
|
||||
def requires_to_requires_dist(requirement):
    """Compose the version predicates for requirement in PEP 345 fashion."""
    predicates = ["{0}{1}".format(op, ver) for op, ver in requirement.specs]
    if not predicates:
        return ''
    return " (%s)" % ','.join(sorted(predicates))
|
||||
|
||||
|
||||
def convert_requirements(requirements):
    """Yield Requires-Dist: strings for parsed requirements strings."""
    for req in requirements:
        parsed = pkg_resources.Requirement.parse(req)
        spec = requires_to_requires_dist(parsed)
        extras = ",".join(parsed.extras)
        if extras:
            extras = "[{0}]".format(extras)
        yield parsed.project_name + extras + spec
|
||||
|
||||
|
||||
def generate_requirements(extras_require):
    """
    Convert requirements from a setup()-style dictionary to ('Requires-Dist', 'requirement')
    and ('Provides-Extra', 'extra') tuples.

    extras_require is a dictionary of {extra: [requirements]} as passed to setup(),
    using the empty extra {'': [requirements]} to hold install_requires.
    """
    for extra, depends in extras_require.items():
        condition = ''
        if extra and ':' in extra:  # setuptools extra:condition syntax
            extra, condition = extra.split(':', 1)
        extra = pkg_resources.safe_extra(extra)
        if extra:
            yield ('Provides-Extra', extra)
            # Combine any environment-marker condition with the extra marker.
            if condition:
                condition = "(" + condition + ") and "
            condition += "extra == '%s'" % extra
        if condition:
            condition = '; ' + condition
        for new_req in convert_requirements(depends):
            yield ('Requires-Dist', new_req + condition)
|
||||
|
||||
|
||||
def pkginfo_to_metadata(egg_info_path, pkginfo_path):
    """
    Convert .egg-info directory with PKG-INFO to the Metadata 2.1 format
    """
    pkg_info = read_pkg_info(pkginfo_path)
    pkg_info.replace_header('Metadata-Version', '2.1')
    # Fold requires.txt sections into Requires-Dist / Provides-Extra headers.
    requires_path = os.path.join(egg_info_path, 'requires.txt')
    if os.path.exists(requires_path):
        with open(requires_path) as requires_file:
            requires = requires_file.read()
        # Sort so the unconditional (None) section is emitted first.
        for extra, reqs in sorted(pkg_resources.split_sections(requires),
                                  key=lambda x: x[0] or ''):
            for item in generate_requirements({extra: reqs}):
                pkg_info[item[0]] = item[1]

    # Metadata 2.1 carries the long description in the message body,
    # not in a Description header.
    description = pkg_info['Description']
    if description:
        pkg_info.set_payload(dedent_description(pkg_info))
        del pkg_info['Description']

    return pkg_info
|
||||
|
||||
|
||||
def pkginfo_unicode(pkg_info, field):
    """Hack to coax Unicode out of an email Message() - Python 3.3+"""
    text = pkg_info[field]
    field = field.lower()
    if not isinstance(text, str):
        if not hasattr(pkg_info, 'raw_items'):  # Python 3.2
            return str(text)
        # Re-decode the raw header bytes as UTF-8 via surrogateescape.
        for key, value in pkg_info.raw_items():
            if key.lower() == field:
                text = value.encode('ascii', 'surrogateescape').decode('utf-8')
                break

    return text
|
||||
|
||||
|
||||
def dedent_description(pkg_info):
    """
    Dedent and convert pkg_info['Description'] to Unicode.
    """
    description = pkg_info['Description']

    # Python 3 Unicode handling, sorta.
    surrogates = not isinstance(description, str)
    if surrogates:
        description = pkginfo_unicode(pkg_info, 'Description')

    lines = description.splitlines()
    # The first line is lstripped separately because, if long_description
    # starts with a blank line, it would otherwise stay indented.
    pieces = (lines[0].lstrip(),
              textwrap.dedent('\n'.join(lines[1:])),
              '\n')
    dedented = '\n'.join(pieces)

    if surrogates:
        dedented = dedented.encode("utf8").decode("ascii", "surrogateescape")

    return dedented
|
43
projecten1/lib/python3.6/site-packages/wheel/paths.py
Normal file
43
projecten1/lib/python3.6/site-packages/wheel/paths.py
Normal file
@@ -0,0 +1,43 @@
|
||||
"""
|
||||
Installation paths.
|
||||
|
||||
Map the .data/ subdirectory names to install paths.
|
||||
"""
|
||||
|
||||
import distutils.command.install as install
|
||||
import distutils.dist as dist
|
||||
import os.path
|
||||
import sys
|
||||
|
||||
|
||||
def get_install_command(name):
    """Build and finalize a distutils ``install`` command for dist *name*."""
    # late binding due to potential monkeypatching
    distribution = dist.Distribution({'name': name})
    command = install.install(distribution)
    command.finalize_options()
    return command
|
||||
|
||||
|
||||
def get_install_paths(name):
    """
    Return the (distutils) install paths for the named dist.

    A dict with ('purelib', 'platlib', 'headers', 'scripts', 'data') keys.
    """
    cmd = get_install_command(name)
    paths = {key: getattr(cmd, 'install_' + key)
             for key in install.SCHEME_KEYS}

    # pip uses a similar path as an alternative to the system's (read-only)
    # include directory:
    if hasattr(sys, 'real_prefix'):  # virtualenv
        paths['headers'] = os.path.join(sys.prefix,
                                        'include',
                                        'site',
                                        'python' + sys.version[:3],
                                        name)

    return paths
|
180
projecten1/lib/python3.6/site-packages/wheel/pep425tags.py
Normal file
180
projecten1/lib/python3.6/site-packages/wheel/pep425tags.py
Normal file
@@ -0,0 +1,180 @@
|
||||
"""Generate and work with PEP 425 Compatibility Tags."""
|
||||
|
||||
import distutils.util
|
||||
import platform
|
||||
import sys
|
||||
import sysconfig
|
||||
import warnings
|
||||
|
||||
|
||||
def get_config_var(var):
    """Look up *var* via sysconfig, returning None if the lookup fails."""
    try:
        value = sysconfig.get_config_var(var)
    except IOError as e:  # pip Issue #1074
        warnings.warn("{0}".format(e), RuntimeWarning)
        return None
    return value
|
||||
|
||||
|
||||
def get_abbr_impl():
    """Return abbreviated implementation name."""
    abbreviations = {
        'PyPy': 'pp',
        'Jython': 'jy',
        'IronPython': 'ip',
        'CPython': 'cp',
    }
    impl = platform.python_implementation()
    if impl in abbreviations:
        return abbreviations[impl]

    raise LookupError('Unknown Python implementation: ' + impl)
|
||||
|
||||
|
||||
def get_impl_ver():
    """Return implementation version."""
    version = get_config_var("py_version_nodot")
    # PyPy's py_version_nodot reflects the CPython level it emulates,
    # so fall back to the implementation version tuple there too.
    if not version or get_abbr_impl() == 'pp':
        version = ''.join(str(part) for part in get_impl_version_info())
    return version
|
||||
|
||||
|
||||
def get_impl_version_info():
    """Return sys.version_info-like tuple for use in decrementing the minor
    version."""
    if get_abbr_impl() == 'pp':
        # as per https://github.com/pypa/pip/issues/2882
        return (sys.version_info[0], sys.pypy_version_info.major,
                sys.pypy_version_info.minor)
    return sys.version_info[0], sys.version_info[1]
|
||||
|
||||
|
||||
def get_flag(var, fallback, expected=True, warn=True):
    """Use a fallback method for determining SOABI flags if the needed config
    var is unset or unavailable."""
    val = get_config_var(var)
    if val is not None:
        return val == expected
    if warn:
        warnings.warn("Config variable '{0}' is unset, Python ABI tag may "
                      "be incorrect".format(var), RuntimeWarning, 2)
    return fallback()
|
||||
|
||||
|
||||
def get_abi_tag():
    """Return the ABI tag based on SOABI (if available) or emulate SOABI
    (CPython 2, PyPy)."""
    soabi = get_config_var('SOABI')
    impl = get_abbr_impl()
    if not soabi and impl in ('cp', 'pp') and hasattr(sys, 'maxunicode'):
        # No SOABI config var: reconstruct the d/m/u ABI flags by probing
        # the interpreter directly.
        d = ''
        m = ''
        u = ''
        if get_flag('Py_DEBUG',
                    lambda: hasattr(sys, 'gettotalrefcount'),
                    warn=(impl == 'cp')):
            d = 'd'
        if get_flag('WITH_PYMALLOC',
                    lambda: impl == 'cp',
                    warn=(impl == 'cp')):
            m = 'm'
        # 'u' (wide unicode) only exists before Python 3.3.
        if get_flag('Py_UNICODE_SIZE',
                    lambda: sys.maxunicode == 0x10ffff,
                    expected=4,
                    warn=(impl == 'cp' and
                          sys.version_info < (3, 3))) \
                and sys.version_info < (3, 3):
            u = 'u'
        abi = '%s%s%s%s%s' % (impl, get_impl_ver(), d, m, u)
    elif soabi and soabi.startswith('cpython-'):
        # e.g. 'cpython-36m-x86_64-linux-gnu' -> 'cp36m'
        abi = 'cp' + soabi.split('-')[1]
    elif soabi:
        abi = soabi.replace('.', '_').replace('-', '_')
    else:
        abi = None
    return abi
|
||||
|
||||
|
||||
def get_platform():
    """Return our platform name 'win32', 'linux_x86_64'"""
    # XXX remove distutils dependency
    plat = distutils.util.get_platform().replace('.', '_').replace('-', '_')
    # pip pull request #3497: a 32-bit interpreter on a 64-bit kernel
    # still reports linux_x86_64 but can only use 32-bit wheels.
    if plat == "linux_x86_64" and sys.maxsize == 2147483647:
        plat = "linux_i686"
    return plat
|
||||
|
||||
|
||||
def get_supported(versions=None, supplied_platform=None):
    """Return a list of supported tags for each version specified in
    `versions`.

    :param versions: a list of string versions, of the form ["33", "32"],
        or None. The first version will be assumed to support our ABI.
    """
    supported = []

    # Versions must be given with respect to the preference
    if versions is None:
        versions = []
        version_info = get_impl_version_info()
        major = version_info[:-1]
        # Support all previous minor Python versions.
        for minor in range(version_info[-1], -1, -1):
            versions.append(''.join(map(str, major + (minor,))))

    impl = get_abbr_impl()

    abis = []

    abi = get_abi_tag()
    if abi:
        abis[0:0] = [abi]

    # Collect stable-ABI suffixes (e.g. 'abi3') from the import machinery.
    # NOTE(review): 'imp' is deprecated in modern Pythons; importlib.machinery
    # exposes the same suffixes — confirm before modernizing.
    abi3s = set()
    import imp
    for suffix in imp.get_suffixes():
        if suffix[0].startswith('.abi'):
            abi3s.add(suffix[0].split('.', 2)[1])

    abis.extend(sorted(list(abi3s)))

    abis.append('none')

    platforms = []
    if supplied_platform:
        platforms.append(supplied_platform)
    platforms.append(get_platform())

    # Current version, current API (built specifically for our Python):
    for abi in abis:
        for arch in platforms:
            supported.append(('%s%s' % (impl, versions[0]), abi, arch))

    # abi3 modules compatible with older version of Python
    for version in versions[1:]:
        # abi3 was introduced in Python 3.2
        if version in ('31', '30'):
            break
        for abi in abi3s:  # empty set if not Python 3
            for arch in platforms:
                supported.append(("%s%s" % (impl, version), abi, arch))

    # No abi / arch, but requires our implementation:
    for i, version in enumerate(versions):
        supported.append(('%s%s' % (impl, version), 'none', 'any'))
        if i == 0:
            # Tagged specifically as being cross-version compatible
            # (with just the major version specified)
            supported.append(('%s%s' % (impl, versions[0][0]), 'none', 'any'))

    # Major Python version + platform; e.g. binaries not using the Python API
    # NOTE(review): 'arch' here is whatever value was left over from the
    # loops above (i.e. the last platform only) — confirm this is intended.
    supported.append(('py%s' % (versions[0][0]), 'none', arch))

    # No abi / arch, generic Python
    for i, version in enumerate(versions):
        supported.append(('py%s' % (version,), 'none', 'any'))
        if i == 0:
            supported.append(('py%s' % (version[0]), 'none', 'any'))

    return supported
|
43
projecten1/lib/python3.6/site-packages/wheel/pkginfo.py
Normal file
43
projecten1/lib/python3.6/site-packages/wheel/pkginfo.py
Normal file
@@ -0,0 +1,43 @@
|
||||
"""Tools for reading and writing PKG-INFO / METADATA without caring
|
||||
about the encoding."""
|
||||
|
||||
from email.parser import Parser
|
||||
|
||||
try:
|
||||
unicode
|
||||
_PY3 = False
|
||||
except NameError:
|
||||
_PY3 = True
|
||||
|
||||
if not _PY3:
|
||||
from email.generator import Generator
|
||||
|
||||
def read_pkg_info_bytes(bytestr):
|
||||
return Parser().parsestr(bytestr)
|
||||
|
||||
def read_pkg_info(path):
|
||||
with open(path, "r") as headers:
|
||||
message = Parser().parse(headers)
|
||||
return message
|
||||
|
||||
def write_pkg_info(path, message):
|
||||
with open(path, 'w') as metadata:
|
||||
Generator(metadata, mangle_from_=False, maxheaderlen=0).flatten(message)
|
||||
else:
|
||||
from email.generator import BytesGenerator
|
||||
|
||||
def read_pkg_info_bytes(bytestr):
|
||||
headers = bytestr.decode(encoding="ascii", errors="surrogateescape")
|
||||
message = Parser().parsestr(headers)
|
||||
return message
|
||||
|
||||
def read_pkg_info(path):
|
||||
with open(path, "r",
|
||||
encoding="ascii",
|
||||
errors="surrogateescape") as headers:
|
||||
message = Parser().parse(headers)
|
||||
return message
|
||||
|
||||
def write_pkg_info(path, message):
|
||||
with open(path, "wb") as out:
|
||||
BytesGenerator(out, mangle_from_=False, maxheaderlen=0).flatten(message)
|
@@ -0,0 +1,110 @@
|
||||
"""
|
||||
Create and verify jws-js format Ed25519 signatures.
|
||||
"""
|
||||
|
||||
import json
|
||||
from ..util import urlsafe_b64decode, urlsafe_b64encode, native, binary
|
||||
|
||||
__all__ = ['sign', 'verify']
|
||||
|
||||
ed25519ll = None
|
||||
|
||||
ALG = "Ed25519"
|
||||
|
||||
|
||||
def get_ed25519ll():
    """Lazy import-and-test of ed25519 module"""
    global ed25519ll

    if not ed25519ll:
        try:
            import ed25519ll  # fast (thousands / s)
        except (ImportError, OSError):  # pragma nocover
            from . import ed25519py as ed25519ll  # pure Python (hundreds / s)
        # Self-check: sign and verify a throwaway payload once per process.
        test()

    return ed25519ll
|
||||
|
||||
|
||||
def sign(payload, keypair):
    """Return a JWS-JS format signature given a JSON-serializable payload and
    an Ed25519 keypair."""
    get_ed25519ll()
    #
    header = {
        "alg": ALG,
        "jwk": {
            "kty": ALG,  # alg -> kty in jwk-08.
            "vk": native(urlsafe_b64encode(keypair.vk))
        }
    }

    # The JWS "secured input" is base64url(header) + "." + base64url(payload);
    # sort_keys makes the serialization deterministic.
    encoded_header = urlsafe_b64encode(binary(json.dumps(header, sort_keys=True)))
    encoded_payload = urlsafe_b64encode(binary(json.dumps(payload, sort_keys=True)))
    secured_input = b".".join((encoded_header, encoded_payload))
    sig_msg = ed25519ll.crypto_sign(secured_input, keypair.sk)
    # crypto_sign returns signature || message; keep only the signature part.
    signature = sig_msg[:ed25519ll.SIGNATUREBYTES]
    encoded_signature = urlsafe_b64encode(signature)

    return {"recipients":
            [{"header": native(encoded_header),
              "signature": native(encoded_signature)}],
            "payload": native(encoded_payload)}
|
||||
|
||||
|
||||
def assertTrue(condition, message=""):
    """Raise ValueError(message) unless *condition* is truthy."""
    if condition:
        return
    raise ValueError(message)
|
||||
|
||||
|
||||
def verify(jwsjs):
    """Return (decoded headers, payload) if all signatures in jwsjs are
    consistent, else raise ValueError.

    Caller must decide whether the keys are actually trusted."""
    get_ed25519ll()
    # XXX forbid duplicate keys in JSON input using object_pairs_hook (2.7+)
    recipients = jwsjs["recipients"]
    encoded_payload = binary(jwsjs["payload"])
    headers = []
    for recipient in recipients:
        assertTrue(len(recipient) == 2, "Unknown recipient key {0}".format(recipient))
        h = binary(recipient["header"])
        s = binary(recipient["signature"])
        header = json.loads(native(urlsafe_b64decode(h)))
        assertTrue(header["alg"] == ALG,
                   "Unexpected algorithm {0}".format(header["alg"]))
        if "alg" in header["jwk"] and "kty" not in header["jwk"]:
            header["jwk"]["kty"] = header["jwk"]["alg"]  # b/w for JWK < -08
        assertTrue(header["jwk"]["kty"] == ALG,  # true for Ed25519
                   "Unexpected key type {0}".format(header["jwk"]["kty"]))
        vk = urlsafe_b64decode(binary(header["jwk"]["vk"]))
        # crypto_sign_open expects signature || message and only returns the
        # message when the signature checks out against vk.
        secured_input = b".".join((h, encoded_payload))
        sig = urlsafe_b64decode(s)
        sig_msg = sig + secured_input
        verified_input = native(ed25519ll.crypto_sign_open(sig_msg, vk))
        verified_header, verified_payload = verified_input.split('.')
        verified_header = binary(verified_header)
        decoded_header = native(urlsafe_b64decode(verified_header))
        headers.append(json.loads(decoded_header))

    verified_payload = binary(verified_payload)

    # only return header, payload that have passed through the crypto library.
    payload = json.loads(native(urlsafe_b64decode(verified_payload)))

    return headers, payload
|
||||
|
||||
|
||||
def test():
    # Round-trip self-check: a fresh keypair must verify its own signature,
    # and a tampered payload must raise ValueError.
    kp = ed25519ll.crypto_sign_keypair()
    payload = {'test': 'onstartup'}
    jwsjs = json.loads(json.dumps(sign(payload, kp)))
    verify(jwsjs)
    jwsjs['payload'] += 'x'
    try:
        verify(jwsjs)
    except ValueError:
        pass
    else:  # pragma no cover
        raise RuntimeError("No error from bad wheel.signatures payload.")
|
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
323
projecten1/lib/python3.6/site-packages/wheel/signatures/djbec.py
Normal file
323
projecten1/lib/python3.6/site-packages/wheel/signatures/djbec.py
Normal file
@@ -0,0 +1,323 @@
|
||||
# Ed25519 digital signatures
|
||||
# Based on https://ed25519.cr.yp.to/python/ed25519.py
|
||||
# See also https://ed25519.cr.yp.to/software.html
|
||||
# Adapted by Ron Garret
|
||||
# Sped up considerably using coordinate transforms found on:
|
||||
# https://www.hyperelliptic.org/EFD/g1p/auto-twisted-extended-1.html
|
||||
# Specifically add-2008-hwcd-4 and dbl-2008-hwcd
|
||||
|
||||
import hashlib
|
||||
import random
|
||||
|
||||
try:  # pragma nocover
    # Python 2: 'unicode' exists; bytes are str, so byte strings are built
    # from integer lists via chr() and indexing yields 1-char strings.
    unicode
    PY3 = False

    def asbytes(b):
        """Convert array of integers to byte string"""
        return ''.join(chr(x) for x in b)

    def joinbytes(b):
        """Convert array of bytes to byte string"""
        return ''.join(b)

    def bit(h, i):
        """Return i'th bit of bytestring h"""
        return (ord(h[i // 8]) >> (i % 8)) & 1
except NameError:  # pragma nocover
    # Python 3: bytes() covers both conversions; indexing yields ints.
    PY3 = True
    asbytes = bytes
    joinbytes = bytes

    def bit(h, i):
        # Return i'th (little-endian) bit of byte string h.
        return (h[i // 8] >> (i % 8)) & 1
|
||||
|
||||
b = 256
|
||||
q = 2 ** 255 - 19
|
||||
l = 2 ** 252 + 27742317777372353535851937790883648493 # noqa: E741
|
||||
|
||||
|
||||
def H(m):
    """Return the SHA-512 digest of byte string *m*."""
    digest = hashlib.sha512(m)
    return digest.digest()
|
||||
|
||||
|
||||
def expmod(b, e, m):
    """Modular exponentiation b**e mod m (returns 1 when e == 0)."""
    if e == 0:
        return 1

    # Iterative square-and-multiply, least-significant bit first.
    result = 1
    base = b % m
    while e:
        if e & 1:
            result = (result * base) % m
        base = (base * base) % m
        e >>= 1
    return result
|
||||
|
||||
|
||||
# Can probably get some extra speedup here by replacing this with
|
||||
# an extended-euclidean, but performance seems OK without that
|
||||
def inv(x):
    # Modular inverse via Fermat's little theorem: x^(q-2) mod q.
    return expmod(x, q - 2, q)
|
||||
|
||||
|
||||
d = -121665 * inv(121666)
|
||||
I = expmod(2, (q - 1) // 4, q) # noqa: E741
|
||||
|
||||
|
||||
def xrecover(y):
    # Recover x from y on the curve: x^2 = (y^2 - 1) / (d*y^2 + 1);
    # candidate root via the exponent (q+3)/8 trick (q = 5 mod 8).
    xx = (y * y - 1) * inv(d * y * y + 1)
    x = expmod(xx, (q + 3) // 8, q)
    # If the candidate squares to -xx, multiply by sqrt(-1) (I).
    if (x * x - xx) % q != 0:
        x = (x * I) % q

    # Canonicalize to the even root.
    if x % 2 != 0:
        x = q - x

    return x
|
||||
|
||||
|
||||
By = 4 * inv(5)
|
||||
Bx = xrecover(By)
|
||||
B = [Bx % q, By % q]
|
||||
|
||||
|
||||
# def edwards(P,Q):
|
||||
# x1 = P[0]
|
||||
# y1 = P[1]
|
||||
# x2 = Q[0]
|
||||
# y2 = Q[1]
|
||||
# x3 = (x1*y2+x2*y1) * inv(1+d*x1*x2*y1*y2)
|
||||
# y3 = (y1*y2+x1*x2) * inv(1-d*x1*x2*y1*y2)
|
||||
# return (x3 % q,y3 % q)
|
||||
|
||||
# def scalarmult(P,e):
|
||||
# if e == 0: return [0,1]
|
||||
# Q = scalarmult(P,e/2)
|
||||
# Q = edwards(Q,Q)
|
||||
# if e & 1: Q = edwards(Q,P)
|
||||
# return Q
|
||||
|
||||
# Faster (!) version based on:
|
||||
# https://www.hyperelliptic.org/EFD/g1p/auto-twisted-extended-1.html
|
||||
|
||||
def xpt_add(pt1, pt2):
    # Point addition in extended twisted-Edwards coordinates
    # (add-2008-hwcd-4 formulas, see module header).
    (X1, Y1, Z1, T1) = pt1
    (X2, Y2, Z2, T2) = pt2
    A = ((Y1 - X1) * (Y2 + X2)) % q
    B = ((Y1 + X1) * (Y2 - X2)) % q
    C = (Z1 * 2 * T2) % q
    D = (T1 * 2 * Z2) % q
    E = (D + C) % q
    F = (B - A) % q
    G = (B + A) % q
    H = (D - C) % q
    X3 = (E * F) % q
    Y3 = (G * H) % q
    Z3 = (F * G) % q
    T3 = (E * H) % q
    return (X3, Y3, Z3, T3)
|
||||
|
||||
|
||||
def xpt_double(pt):
    # Point doubling in extended coordinates (dbl-2008-hwcd formulas);
    # the T input is unused by this formula.
    (X1, Y1, Z1, _) = pt
    A = (X1 * X1)
    B = (Y1 * Y1)
    C = (2 * Z1 * Z1)
    D = (-A) % q
    J = (X1 + Y1) % q
    E = (J * J - A - B) % q
    G = (D + B) % q
    F = (G - C) % q
    H = (D - B) % q
    X3 = (E * F) % q
    Y3 = (G * H) % q
    Z3 = (F * G) % q
    T3 = (E * H) % q
    return X3, Y3, Z3, T3
|
||||
|
||||
|
||||
def pt_xform(pt):
    # Affine (x, y) -> extended coordinates (X, Y, Z, T) with Z = 1.
    (x, y) = pt
    return x, y, 1, (x * y) % q
|
||||
|
||||
|
||||
def pt_unxform(pt):
    # Extended (X, Y, Z, T) -> affine (X/Z, Y/Z) mod q.
    (x, y, z, _) = pt
    return (x * inv(z)) % q, (y * inv(z)) % q
|
||||
|
||||
|
||||
def xpt_mult(pt, n):
    # Recursive double-and-add scalar multiplication in extended coords.
    if n == 0:
        return pt_xform((0, 1))  # neutral element of the group

    _ = xpt_double(xpt_mult(pt, n >> 1))
    return xpt_add(_, pt) if n & 1 else _
|
||||
|
||||
|
||||
def scalarmult(pt, e):
    # e * pt, computed in extended coordinates for speed.
    return pt_unxform(xpt_mult(pt_xform(pt), e))
|
||||
|
||||
|
||||
def encodeint(y):
    """Encode integer *y* as b//8 little-endian bytes (low b bits only)."""
    octets = [(y >> (8 * index)) & 0xFF for index in range(b // 8)]
    return asbytes(octets)
|
||||
|
||||
|
||||
def encodepoint(P):
    # Encode a point as b//8 little-endian bytes: the low b-1 bits of y
    # plus the parity of x packed into the top bit.
    x = P[0]
    y = P[1]
    bits = [(y >> i) & 1 for i in range(b - 1)] + [x & 1]
    e = [(sum([bits[i * 8 + j] << j for j in range(8)]))
         for i in range(b // 8)]
    return asbytes(e)
|
||||
|
||||
|
||||
def publickey(sk):
    # Derive the public key: clamp SHA-512(sk) into scalar a, return A = a*B.
    h = H(sk)
    a = 2 ** (b - 2) + sum(2 ** i * bit(h, i) for i in range(3, b - 2))
    A = scalarmult(B, a)
    return encodepoint(A)
|
||||
|
||||
|
||||
def Hint(m):
    """Hash ``m`` and interpret the 2b-bit digest as a little-endian integer."""
    digest = H(m)
    return sum(2 ** i * bit(digest, i) for i in range(2 * b))
|
||||
|
||||
|
||||
def signature(m, sk, pk):
    """Produce an Ed25519 signature (encoded R || encoded S) over message
    ``m`` with secret seed ``sk`` and encoded public key ``pk``."""
    h = H(sk)
    # Clamped secret scalar derived from the first half of the digest.
    a = 2 ** (b - 2) + sum(2 ** i * bit(h, i) for i in range(3, b - 2))
    # Second half of the digest acts as the deterministic-nonce prefix.
    prefix = joinbytes([h[i] for i in range(b // 8, b // 4)])
    r = Hint(prefix + m)
    R = scalarmult(B, r)
    S = (r + Hint(encodepoint(R) + pk + m) * a) % l
    return encodepoint(R) + encodeint(S)
|
||||
|
||||
|
||||
def isoncurve(P):
    """True iff affine point P = (x, y) satisfies the twisted Edwards
    equation -x^2 + y^2 = 1 + d*x^2*y^2 (mod q)."""
    x = P[0]
    y = P[1]
    x2 = x * x
    y2 = y * y
    return (-x2 + y2 - 1 - d * x2 * y2) % q == 0
|
||||
|
||||
|
||||
def decodeint(s):
    """Decode a little-endian byte string into a ``b``-bit integer."""
    return sum(bit(s, i) << i for i in range(b))
|
||||
|
||||
|
||||
def decodepoint(s):
    """Decode b // 8 bytes into an affine point [x, y].

    y is the low b-1 bits; x is recovered from the curve equation, with
    its sign fixed by the parity stored in the top bit. Raises if the
    result is not on the curve.
    """
    y = sum(bit(s, i) << i for i in range(b - 1))
    x = xrecover(y)
    if x & 1 != bit(s, b - 1):
        # Wrong parity: take the other square root.
        x = q - x

    P = [x, y]
    if not isoncurve(P):
        raise Exception("decoding point that is not on curve")

    return P
|
||||
|
||||
|
||||
def checkvalid(s, m, pk):
    """Verify Ed25519 signature ``s`` over message ``m`` under encoded
    public key ``pk``; returns True/False, raising on malformed lengths."""
    if len(s) != b // 4:
        raise Exception("signature length is wrong")
    if len(pk) != b // 8:
        raise Exception("public-key length is wrong")

    R = decodepoint(s[:b // 8])
    A = decodepoint(pk)
    S = decodeint(s[b // 8:b // 4])
    h = Hint(encodepoint(R) + pk + m)
    lhs = scalarmult(B, S)
    # rhs = R + h*A, computed in extended coordinates.
    rhs = pt_unxform(xpt_add(pt_xform(R), pt_xform(scalarmult(A, h))))
    return lhs == rhs
|
||||
|
||||
|
||||
##########################################################
#
# Curve25519 reference implementation by Matthew Dempsky, from:
# https://cr.yp.to/highspeed/naclcrypto-20090310.pdf

# P = 2 ** 255 - 19
# Curve25519 uses the same prime field as Ed25519 above, so reuse q.
P = q
# Montgomery curve coefficient in y^2 = x^3 + A*x^2 + x.
A = 486662
|
||||
|
||||
|
||||
# def expmod(b, e, m):
|
||||
# if e == 0: return 1
|
||||
# t = expmod(b, e / 2, m) ** 2 % m
|
||||
# if e & 1: t = (t * b) % m
|
||||
# return t
|
||||
|
||||
# def inv(x): return expmod(x, P - 2, P)
|
||||
|
||||
|
||||
def add(n, m, d):
    """Montgomery differential addition.

    Given the projective x-coordinates (X, Z) of points n and m and of
    their difference d = n - m, return the (X, Z) pair of n + m mod P.
    """
    (xn, zn) = n
    (xm, zm) = m
    (xd, zd) = d
    sum_term = xm * xn - zm * zn
    diff_term = xm * zn - zm * xn
    return ((4 * sum_term * sum_term * zd) % P,
            (4 * diff_term * diff_term * xd) % P)
|
||||
|
||||
|
||||
def double(n):
    """Montgomery doubling: projective x-coordinate (X, Z) of 2*n mod P."""
    (xn, zn) = n
    xn2 = xn ** 2
    zn2 = zn ** 2
    diff = xn2 - zn2
    return ((diff * diff) % P,
            (4 * xn * zn * (xn2 + A * xn * zn + zn2)) % P)
|
||||
|
||||
|
||||
def curve25519(n, base=9):
    """Return the affine x-coordinate of n * base on Curve25519.

    Uses a recursive Montgomery ladder: ladder(m) yields the projective
    x-coordinates of the m-th and (m+1)-th multiples of base.
    """
    one = (base, 1)
    two = double(one)

    def ladder(m):
        # Returns (m*base, (m+1)*base) as projective (X, Z) pairs.
        if m == 1:
            return (one, two)

        (pm, pm1) = ladder(m // 2)
        if m & 1:
            return (add(pm, pm1, one), double(pm1))

        return (double(pm), add(pm, pm1, one))

    ((x, z), _) = ladder(n)
    return (x * inv(z)) % P
|
||||
|
||||
|
||||
def genkey(n=0):
    """Return a clamped Curve25519 secret scalar.

    A random scalar is drawn when ``n`` is falsy; clamping clears the
    three low bits and bit 255 and sets bit 254, per the Curve25519 spec.
    """
    n = n or random.randint(0, P)
    n &= ~7              # clear the three low bits
    n &= ~(1 << 255)     # clear the top bit
    n |= 1 << 254        # set bit 254
    return n
|
||||
|
||||
|
||||
# def str2int(s):
|
||||
# return int(hexlify(s), 16)
|
||||
# # return sum(ord(s[i]) << (8 * i) for i in range(32))
|
||||
#
|
||||
# def int2str(n):
|
||||
# return unhexlify("%x" % n)
|
||||
# # return ''.join([chr((n >> (8 * i)) & 255) for i in range(32)])
|
||||
|
||||
#################################################
|
||||
|
||||
|
||||
def dsa_test():
    """Self-test: sign a random message and verify the signature."""
    import os
    msg = str(random.randint(q, q + q)).encode('utf-8')
    sk = os.urandom(32)
    pk = publickey(sk)
    return checkvalid(signature(msg, sk, pk), msg, pk)
|
||||
|
||||
|
||||
def dh_test():
    """Self-test: Diffie-Hellman agreement commutes for two random keys."""
    alice = genkey()
    bob = genkey()
    shared_ab = curve25519(alice, curve25519(bob))
    shared_ba = curve25519(bob, curve25519(alice))
    return shared_ab == shared_ba
|
@@ -0,0 +1,50 @@
|
||||
import os
|
||||
import warnings
|
||||
from collections import namedtuple
|
||||
|
||||
from . import djbec
|
||||
|
||||
__all__ = ['crypto_sign', 'crypto_sign_open', 'crypto_sign_keypair', 'Keypair',
           'PUBLICKEYBYTES', 'SECRETKEYBYTES', 'SIGNATUREBYTES']

# NaCl crypto_sign sizes, in bytes.
PUBLICKEYBYTES = 32
SECRETKEYBYTES = 64  # 32-byte seed followed by the 32-byte verifying key
SIGNATUREBYTES = 64

Keypair = namedtuple('Keypair', ('vk', 'sk'))  # verifying key, secret key
|
||||
|
||||
|
||||
def crypto_sign_keypair(seed=None):
    """Return (verifying, secret) key from a given seed, or os.urandom(32).

    The secret key follows the NaCl layout: the 32-byte seed concatenated
    with the 32-byte verifying key.
    """
    if seed is None:
        seed = os.urandom(PUBLICKEYBYTES)
    else:
        # Caller-supplied seeds are discouraged, and must be the right size.
        warnings.warn("ed25519ll should choose random seed.",
                      RuntimeWarning)
        if len(seed) != 32:
            raise ValueError("seed must be 32 random bytes or None.")
    verifying = djbec.publickey(seed)
    return Keypair(verifying, seed + verifying)
|
||||
|
||||
|
||||
def crypto_sign(msg, sk):
    """Return signature+message given message and secret key.

    The signature is the first SIGNATUREBYTES bytes of the return value.
    A copy of msg is in the remainder.
    """
    if len(sk) != SECRETKEYBYTES:
        raise ValueError("Bad signing key length %d" % len(sk))
    # Secret key layout: seed (first 32 bytes) + verifying key (last 32).
    seed = sk[:PUBLICKEYBYTES]
    verifying = sk[PUBLICKEYBYTES:]
    return djbec.signature(msg, seed, verifying) + msg
|
||||
|
||||
|
||||
def crypto_sign_open(signed, vk):
    """Return message given signature+message and the verifying key.

    Raises ValueError on a bad key length or an invalid signature.
    """
    if len(vk) != PUBLICKEYBYTES:
        raise ValueError("Bad verifying key length %d" % len(vk))
    sig = signed[:SIGNATUREBYTES]
    msg = signed[SIGNATUREBYTES:]
    rc = djbec.checkvalid(sig, msg, vk)
    if not rc:
        raise ValueError("rc != True", rc)
    return msg
|
101
projecten1/lib/python3.6/site-packages/wheel/signatures/keys.py
Normal file
101
projecten1/lib/python3.6/site-packages/wheel/signatures/keys.py
Normal file
@@ -0,0 +1,101 @@
|
||||
"""Store and retrieve wheel signing / verifying keys.
|
||||
|
||||
Given a scope (a package name, + meaning "all packages", or - meaning
|
||||
"no packages"), return a list of verifying keys that are trusted for that
|
||||
scope.
|
||||
|
||||
Given a package name, return a list of (scope, key) suggested keys to sign
|
||||
that package (only the verifying keys; the private signing key is stored
|
||||
elsewhere).
|
||||
|
||||
Keys here are represented as urlsafe_b64encoded strings with no padding.
|
||||
|
||||
Tentative command line interface:
|
||||
|
||||
# list trusts
|
||||
wheel trust
|
||||
# trust a particular key for all
|
||||
wheel trust + key
|
||||
# trust key for beaglevote
|
||||
wheel trust beaglevote key
|
||||
# stop trusting a key for all
|
||||
wheel untrust + key
|
||||
|
||||
# generate a key pair
|
||||
wheel keygen
|
||||
|
||||
# import a signing key from a file
|
||||
wheel import keyfile
|
||||
|
||||
# export a signing key
|
||||
wheel export key
|
||||
"""
|
||||
|
||||
import json
|
||||
import os.path
|
||||
|
||||
from ..util import native, load_config_paths, save_config_path
|
||||
|
||||
|
||||
class WheelKeys(object):
    """Store and retrieve wheel signing / verifying keys in wheel.json.

    ``data`` holds two lists of {'scope': ..., 'vk': ...} records:
    'signers' (keys suggested for signing a scope) and 'verifiers'
    (keys trusted to verify a scope). Scope '+' means "all packages".
    """

    SCHEMA = 1
    CONFIG_NAME = 'wheel.json'

    def __init__(self):
        self.data = {'signers': [], 'verifiers': []}

    def load(self):
        """Read the first wheel.json found on the config path, filling in
        missing sections and validating the schema version."""
        # XXX JSON is not a great database
        for path in load_config_paths('wheel'):
            conf = os.path.join(native(path), self.CONFIG_NAME)
            if not os.path.exists(conf):
                continue
            with open(conf, 'r') as infile:
                self.data = json.load(infile)
            for section in ('signers', 'verifiers'):
                self.data.setdefault(section, [])
            if 'schema' not in self.data:
                self.data['schema'] = self.SCHEMA
            elif self.data['schema'] != self.SCHEMA:
                raise ValueError(
                    "Bad wheel.json version {0}, expected {1}".format(
                        self.data['schema'], self.SCHEMA))
            break
        return self

    def save(self):
        """Write the current key data to the user config directory."""
        # Try not to call this a very long time after load()
        conf = os.path.join(native(save_config_path('wheel')), self.CONFIG_NAME)
        with open(conf, 'w+') as out:
            json.dump(self.data, out, indent=2)
        return self

    def trust(self, scope, vk):
        """Start trusting a particular key for given scope."""
        self.data['verifiers'].append({'scope': scope, 'vk': vk})
        return self

    def untrust(self, scope, vk):
        """Stop trusting a particular key for given scope."""
        self.data['verifiers'].remove({'scope': scope, 'vk': vk})
        return self

    def trusted(self, scope=None):
        """Return list of [(scope, trusted key), ...] for given scope."""
        matches = [(entry['scope'], entry['vk'])
                   for entry in self.data['verifiers']
                   if entry['scope'] in (scope, '+')]
        matches.sort(key=lambda pair: pair[0])
        matches.reverse()
        return matches

    def signers(self, scope):
        """Return list of (scope, key) signing keys applicable to scope."""
        matches = [(entry['scope'], entry['vk'])
                   for entry in self.data['signers']
                   if entry['scope'] in (scope, '+')]
        matches.sort(key=lambda pair: pair[0])
        matches.reverse()
        return matches

    def add_signer(self, scope, vk):
        """Remember verifying key vk as being valid for signing in scope."""
        self.data['signers'].append({'scope': scope, 'vk': vk})
|
387
projecten1/lib/python3.6/site-packages/wheel/tool/__init__.py
Normal file
387
projecten1/lib/python3.6/site-packages/wheel/tool/__init__.py
Normal file
@@ -0,0 +1,387 @@
|
||||
"""
|
||||
Wheel command-line utility.
|
||||
"""
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import argparse
|
||||
import hashlib
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
from glob import iglob
|
||||
|
||||
from .. import signatures
|
||||
from ..install import WheelFile, VerifyingZipFile
|
||||
from ..paths import get_install_command
|
||||
from ..util import urlsafe_b64decode, urlsafe_b64encode, native, binary, matches_requirement
|
||||
|
||||
|
||||
def require_pkgresources(name):
    """Raise RuntimeError naming feature ``name`` if pkg_resources
    (part of setuptools) cannot be imported."""
    try:
        import pkg_resources  # noqa: F401
    except ImportError:
        message = "'{0}' needs pkg_resources (part of setuptools).".format(name)
        raise RuntimeError(message)
|
||||
|
||||
|
||||
class WheelError(Exception):
    """Base exception for errors raised by the wheel command-line tool."""
|
||||
|
||||
|
||||
# For testability
|
||||
# For testability
def get_keyring():
    """Return (WheelKeys, keyring) for the optional signing support.

    Raises WheelError when the keyring package (or a usable backend —
    checked via the priority assertion) or the local keys module is
    unavailable.
    """
    try:
        from ..signatures import keys
        import keyring
        # A backend with falsy priority is unusable; treat it as missing.
        assert keyring.get_keyring().priority
    except (ImportError, AssertionError):
        raise WheelError(
            "Install wheel[signatures] (requires keyring, keyrings.alt, pyxdg) for signatures.")

    return keys.WheelKeys, keyring
|
||||
|
||||
|
||||
def warn_signatures():
    """Print the deprecation notice for the signing subcommands to stderr."""
    print('WARNING: The wheel signing and signature verification commands have been deprecated '
          'and will be removed before the v1.0.0 release.', file=sys.stderr)
|
||||
|
||||
|
||||
def keygen(get_keyring=get_keyring):
    """Generate a public/private key pair.

    The secret key is stored in the system keyring under service "wheel";
    the verifying key is registered in wheel.json as a trusted signer and
    verifier for all packages (the '+' scope).
    """
    warn_signatures()
    WheelKeys, keyring = get_keyring()

    ed25519ll = signatures.get_ed25519ll()

    wk = WheelKeys().load()

    keypair = ed25519ll.crypto_sign_keypair()
    # Keys are handled as unpadded urlsafe-base64 text.
    vk = native(urlsafe_b64encode(keypair.vk))
    sk = native(urlsafe_b64encode(keypair.sk))
    kr = keyring.get_keyring()
    kr.set_password("wheel", vk, sk)
    print("Created Ed25519 keypair with vk={}".format(vk))
    print("in {!r}".format(kr))

    # Round-trip the secret to make sure the keyring backend works.
    sk2 = kr.get_password('wheel', vk)
    if sk2 != sk:
        raise WheelError("Keyring is broken. Could not retrieve secret key.")

    print("Trusting {} to sign and verify all packages.".format(vk))
    wk.add_signer('+', vk)
    wk.trust('+', vk)
    wk.save()
|
||||
|
||||
|
||||
def sign(wheelfile, replace=False, get_keyring=get_keyring):
    """Sign a wheel.

    Looks up a signing key for the wheel's distribution name (falling
    back to the '+' scope), retrieves the matching secret key from the
    system keyring, and appends a JWS signature of the RECORD file as
    RECORD.jws inside the archive.
    """
    warn_signatures()
    WheelKeys, keyring = get_keyring()

    ed25519ll = signatures.get_ed25519ll()

    wf = WheelFile(wheelfile, append=True)
    wk = WheelKeys().load()

    name = wf.parsed_filename.group('name')
    # First matching signer wins; raises IndexError when none configured.
    sign_with = wk.signers(name)[0]
    print("Signing {} with {}".format(name, sign_with[1]))

    vk = sign_with[1]
    kr = keyring.get_keyring()
    sk = kr.get_password('wheel', vk)
    keypair = ed25519ll.Keypair(urlsafe_b64decode(binary(vk)),
                                urlsafe_b64decode(binary(sk)))

    record_name = wf.distinfo_name + '/RECORD'
    sig_name = wf.distinfo_name + '/RECORD.jws'
    if sig_name in wf.zipfile.namelist():
        raise WheelError("Wheel is already signed.")
    # The signed payload is the sha256 of RECORD, urlsafe-b64 encoded.
    record_data = wf.zipfile.read(record_name)
    payload = {"hash": "sha256=" + native(urlsafe_b64encode(hashlib.sha256(record_data).digest()))}
    sig = signatures.sign(payload, keypair)
    wf.zipfile.writestr(sig_name, json.dumps(sig, sort_keys=True))
    wf.zipfile.close()
|
||||
|
||||
|
||||
def unsign(wheelfile):
    """
    Remove RECORD.jws from a wheel by truncating the zip file.

    RECORD.jws must be at the end of the archive. The zip file must be an
    ordinary archive, with the compressed files and the directory in the same
    order, and without any non-zip content after the truncation point.
    """
    warn_signatures()
    vzf = VerifyingZipFile(wheelfile, "a")
    info = vzf.infolist()
    # Only the final archive member can be truncated away safely.
    if not (len(info) and info[-1].filename.endswith('/RECORD.jws')):
        raise WheelError('The wheel is not signed (RECORD.jws not found at end of the archive).')
    vzf.pop()
    vzf.close()
|
||||
|
||||
|
||||
def verify(wheelfile):
    """Verify a wheel.

    The signature will be verified for internal consistency ONLY and printed.
    Wheel's own unpack/install commands verify the manifest against the
    signature and file contents.
    """
    warn_signatures()
    wf = WheelFile(wheelfile)
    sig_name = wf.distinfo_name + '/RECORD.jws'
    try:
        sig = json.loads(native(wf.zipfile.open(sig_name).read()))
    except KeyError:
        # zipfile.open raises KeyError when the member does not exist.
        raise WheelError('The wheel is not signed (RECORD.jws not found at end of the archive).')

    verified = signatures.verify(sig)
    # Status goes to stderr so stdout carries only the verified payload.
    print("Signatures are internally consistent.", file=sys.stderr)
    print(json.dumps(verified, indent=2))
|
||||
|
||||
|
||||
def unpack(wheelfile, dest='.'):
    """Unpack a wheel.

    Wheel content will be unpacked to {dest}/{name}-{ver}, where {name}
    is the package name and {ver} its version.

    :param wheelfile: The path to the wheel.
    :param dest: Destination directory (default to current directory).
    """
    wf = WheelFile(wheelfile)
    namever = wf.parsed_filename.group('namever')
    destination = os.path.join(dest, namever)
    print("Unpacking to: %s" % (destination), file=sys.stderr)
    wf.zipfile.extractall(destination)
    wf.zipfile.close()
|
||||
|
||||
|
||||
def install(requirements, requirements_file=None,
            wheel_dirs=None, force=False, list_files=False,
            dry_run=False):
    """Install wheels.

    :param requirements: A list of requirements or wheel files to install.
    :param requirements_file: A file containing requirements to install.
    :param wheel_dirs: A list of directories to search for wheels.
    :param force: Install a wheel file even if it is not compatible.
    :param list_files: Only list the files to install, don't install them.
    :param dry_run: Do everything but the actual install.

    :raises WheelError: for an incompatible or missing wheel file, or a
        requirement with no matching compatible wheel.
    """

    # If no wheel directories specified, use the WHEELPATH environment
    # variable, or the current directory if that is not set.
    if not wheel_dirs:
        wheelpath = os.getenv("WHEELPATH")
        if wheelpath:
            wheel_dirs = wheelpath.split(os.pathsep)
        else:
            wheel_dirs = [os.path.curdir]

    # Get a list of all valid wheels in wheel_dirs
    all_wheels = []
    for d in wheel_dirs:
        for w in os.listdir(d):
            if w.endswith('.whl'):
                wf = WheelFile(os.path.join(d, w))
                if wf.compatible:
                    all_wheels.append(wf)

    # If there is a requirements file, add it to the list of requirements
    if requirements_file:
        # If the file doesn't exist, search for it in wheel_dirs
        # This allows standard requirements files to be stored with the
        # wheels.
        if not os.path.exists(requirements_file):
            for d in wheel_dirs:
                name = os.path.join(d, requirements_file)
                if os.path.exists(name):
                    requirements_file = name
                    break

        with open(requirements_file) as fd:
            requirements.extend(fd)

    to_install = []
    for req in requirements:
        if req.endswith('.whl'):
            # Explicitly specified wheel filename
            if os.path.exists(req):
                wf = WheelFile(req)
                if wf.compatible or force:
                    to_install.append(wf)
                else:
                    msg = ("{0} is not compatible with this Python. "
                           "--force to install anyway.".format(req))
                    raise WheelError(msg)
            else:
                # We could search on wheel_dirs, but it's probably OK to
                # assume the user has made an error.
                raise WheelError("No such wheel file: {}".format(req))
            continue

        # We have a requirement spec
        # If we don't have pkg_resources, this will raise an exception
        matches = matches_requirement(req, all_wheels)
        if not matches:
            raise WheelError("No match for requirement {}".format(req))
        # max() picks the highest-sorting (best) matching wheel.
        to_install.append(max(matches))

    # We now have a list of wheels to install
    if list_files:
        print("Installing:")

    if dry_run:
        return

    for wf in to_install:
        if list_files:
            print(" {}".format(wf.filename))
            continue
        wf.install(force=force)
        wf.zipfile.close()
|
||||
|
||||
|
||||
def install_scripts(distributions):
    """
    Regenerate the entry_points console_scripts for the named distribution.
    """
    try:
        from setuptools.command import easy_install
        import pkg_resources
    except ImportError:
        raise RuntimeError("'wheel install_scripts' needs setuptools.")

    for dist in distributions:
        pkg_resources_dist = pkg_resources.get_distribution(dist)
        install = get_install_command(dist)
        # Reuse easy_install's script-writing machinery on the installed
        # distribution; it needs a (dummy) argument list to finalize.
        command = easy_install.easy_install(install.distribution)
        command.args = ['wheel']  # dummy argument
        command.finalize_options()
        command.install_egg_scripts(pkg_resources_dist)
|
||||
|
||||
|
||||
def convert(installers, dest_dir, verbose):
    """Convert egg or wininst installer files (glob patterns accepted)
    into wheels written to ``dest_dir``."""
    require_pkgresources('wheel convert')

    # Only support wheel convert if pkg_resources is present
    from ..wininst2wheel import bdist_wininst2wheel
    from ..egg2wheel import egg2wheel

    for pattern in installers:
        for installer in iglob(pattern):
            # Pick the converter from the file extension.
            if os.path.splitext(installer)[1] == '.egg':
                converter = egg2wheel
            else:
                converter = bdist_wininst2wheel
            if verbose:
                print("{}... ".format(installer))
                sys.stdout.flush()
            converter(installer, dest_dir)
            if verbose:
                print("OK")
|
||||
|
||||
|
||||
def parser():
    """Build and return the argparse parser for the wheel CLI.

    Each subcommand registers a thin ``*_f`` adapter as ``args.func``
    that unpacks parsed arguments into the matching module function.
    """
    p = argparse.ArgumentParser()
    s = p.add_subparsers(help="commands")

    def keygen_f(args):
        keygen()
    keygen_parser = s.add_parser('keygen', help='Generate signing key')
    keygen_parser.set_defaults(func=keygen_f)

    def sign_f(args):
        sign(args.wheelfile)
    sign_parser = s.add_parser('sign', help='Sign wheel')
    sign_parser.add_argument('wheelfile', help='Wheel file')
    sign_parser.set_defaults(func=sign_f)

    def unsign_f(args):
        unsign(args.wheelfile)
    unsign_parser = s.add_parser('unsign', help=unsign.__doc__)
    unsign_parser.add_argument('wheelfile', help='Wheel file')
    unsign_parser.set_defaults(func=unsign_f)

    def verify_f(args):
        verify(args.wheelfile)
    verify_parser = s.add_parser('verify', help=verify.__doc__)
    verify_parser.add_argument('wheelfile', help='Wheel file')
    verify_parser.set_defaults(func=verify_f)

    def unpack_f(args):
        unpack(args.wheelfile, args.dest)
    unpack_parser = s.add_parser('unpack', help='Unpack wheel')
    unpack_parser.add_argument('--dest', '-d', help='Destination directory',
                               default='.')
    unpack_parser.add_argument('wheelfile', help='Wheel file')
    unpack_parser.set_defaults(func=unpack_f)

    def install_f(args):
        install(args.requirements, args.requirements_file,
                args.wheel_dirs, args.force, args.list_files)
    install_parser = s.add_parser('install', help='Install wheels')
    install_parser.add_argument('requirements', nargs='*',
                                help='Requirements to install.')
    install_parser.add_argument('--force', default=False,
                                action='store_true',
                                help='Install incompatible wheel files.')
    install_parser.add_argument('--wheel-dir', '-d', action='append',
                                dest='wheel_dirs',
                                help='Directories containing wheels.')
    install_parser.add_argument('--requirements-file', '-r',
                                help="A file containing requirements to "
                                "install.")
    install_parser.add_argument('--list', '-l', default=False,
                                dest='list_files',
                                action='store_true',
                                help="List wheels which would be installed, "
                                "but don't actually install anything.")
    install_parser.set_defaults(func=install_f)

    def install_scripts_f(args):
        install_scripts(args.distributions)
    install_scripts_parser = s.add_parser('install-scripts', help='Install console_scripts')
    install_scripts_parser.add_argument('distributions', nargs='*',
                                        help='Regenerate console_scripts for these distributions')
    install_scripts_parser.set_defaults(func=install_scripts_f)

    def convert_f(args):
        convert(args.installers, args.dest_dir, args.verbose)
    convert_parser = s.add_parser('convert', help='Convert egg or wininst to wheel')
    convert_parser.add_argument('installers', nargs='*', help='Installers to convert')
    convert_parser.add_argument('--dest-dir', '-d', default=os.path.curdir,
                                help="Directory to store wheels (default %(default)s)")
    convert_parser.add_argument('--verbose', '-v', action='store_true')
    convert_parser.set_defaults(func=convert_f)

    def version_f(args):
        from .. import __version__
        print("wheel %s" % __version__)
    version_parser = s.add_parser('version', help='Print version and exit')
    version_parser.set_defaults(func=version_f)

    def help_f(args):
        p.print_help()
    help_parser = s.add_parser('help', help='Show this help')
    help_parser.set_defaults(func=help_f)

    return p
|
||||
|
||||
|
||||
def main():
    """CLI entry point: run the selected subcommand.

    Returns 0 on success, 1 when no subcommand was given or a WheelError
    was raised (the error is printed to stderr).
    """
    arg_parser = parser()
    args = arg_parser.parse_args()
    if hasattr(args, 'func'):
        # XXX on Python 3.3 we get 'args has no func' rather than short help.
        try:
            args.func(args)
            return 0
        except WheelError as e:
            print(e, file=sys.stderr)
    else:
        arg_parser.print_help()

    return 1
|
Binary file not shown.
156
projecten1/lib/python3.6/site-packages/wheel/util.py
Normal file
156
projecten1/lib/python3.6/site-packages/wheel/util.py
Normal file
@@ -0,0 +1,156 @@
|
||||
"""Utility functions."""
|
||||
|
||||
import base64
|
||||
import hashlib
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
|
||||
__all__ = ['urlsafe_b64encode', 'urlsafe_b64decode', 'utf8',
|
||||
'to_json', 'from_json', 'matches_requirement']
|
||||
|
||||
|
||||
# For encoding ascii back and forth between bytestrings, as is repeatedly
# necessary in JSON-based crypto under Python 3
if sys.version_info[0] < 3:
    text_type = unicode  # noqa: F821

    # Python 2: str is already a byte string, so no decoding is needed.
    def native(s, encoding='ascii'):
        return s
else:
    text_type = str

    # Python 3: decode bytes to str so values can be used as native text;
    # anything that is already str passes through unchanged.
    def native(s, encoding='ascii'):
        if isinstance(s, bytes):
            return s.decode(encoding)
        return s
|
||||
|
||||
|
||||
def urlsafe_b64encode(data):
    """urlsafe_b64encode without padding.

    :param data: bytes to encode.
    :return: URL-safe base64 bytes with trailing ``=`` padding stripped.
    """
    # The b'=' literal works on both Python 2 (str) and 3 (bytes); there
    # is no need to route the constant through the binary() helper.
    return base64.urlsafe_b64encode(data).rstrip(b'=')
|
||||
|
||||
|
||||
def urlsafe_b64decode(data):
    """urlsafe_b64decode without padding: restore the ``=`` padding that
    urlsafe_b64encode stripped, then decode."""
    padding = b'=' * (4 - (len(data) & 3))
    return base64.urlsafe_b64decode(data + padding)
|
||||
|
||||
|
||||
def to_json(o):
    """Serialize ``o`` to a JSON string with deterministic key order."""
    return json.dumps(o, sort_keys=True)
|
||||
|
||||
|
||||
def from_json(j):
    """Parse a JSON string and return the resulting Python object."""
    return json.loads(j)
|
||||
|
||||
|
||||
def open_for_csv(name, mode):
    """Open ``name`` with the per-version options the csv module needs:
    binary mode on Python 2, newline=''/utf-8 on Python 3."""
    if sys.version_info[0] < 3:
        return open(name, mode + 'b')
    return open(name, mode, newline='', encoding='utf-8')
|
||||
|
||||
|
||||
def utf8(data):
    """Utf-8 encode text; byte strings are passed through unchanged."""
    if isinstance(data, text_type):
        data = data.encode('utf-8')
    return data
|
||||
|
||||
|
||||
def binary(s):
    """ASCII-encode text to bytes; byte strings pass through unchanged."""
    if isinstance(s, text_type):
        s = s.encode('ascii')
    return s
|
||||
|
||||
|
||||
class HashingFile(object):
    """File writer that hashes and counts everything written through it."""

    def __init__(self, path, mode, hashtype='sha256'):
        self.fd = open(path, mode)
        self.hashtype = hashtype
        self.hash = hashlib.new(hashtype)
        self.length = 0  # total number of bytes written so far

    def write(self, data):
        """Feed ``data`` to both the digest and the underlying file."""
        self.hash.update(data)
        self.length += len(data)
        self.fd.write(data)

    def close(self):
        self.fd.close()

    def digest(self):
        """Return the digest: hex for md5, '<type>=<urlsafe-b64>' otherwise."""
        if self.hashtype == 'md5':
            return self.hash.hexdigest()
        raw = self.hash.digest()
        return self.hashtype + '=' + native(urlsafe_b64encode(raw))

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.fd.close()
|
||||
|
||||
|
||||
if sys.platform == 'win32':
    import ctypes.wintypes
    # CSIDL_APPDATA for reference - not used here for compatibility with
    # dirspec, which uses LOCAL_APPDATA and COMMON_APPDATA in that order
    csidl = {'CSIDL_APPDATA': 26, 'CSIDL_LOCAL_APPDATA': 28, 'CSIDL_COMMON_APPDATA': 35}

    def get_path(name):
        # Resolve a CSIDL constant name to its folder path via the shell API.
        SHGFP_TYPE_CURRENT = 0
        buf = ctypes.create_unicode_buffer(ctypes.wintypes.MAX_PATH)
        ctypes.windll.shell32.SHGetFolderPathW(0, csidl[name], 0, SHGFP_TYPE_CURRENT, buf)
        return buf.value

    def save_config_path(*resource):
        # Writable config dir under the user's local application data;
        # created on demand.
        appdata = get_path("CSIDL_LOCAL_APPDATA")
        path = os.path.join(appdata, *resource)
        if not os.path.isdir(path):
            os.makedirs(path)
        return path

    def load_config_paths(*resource):
        # Yield existing config dirs, per-user before machine-wide.
        ids = ["CSIDL_LOCAL_APPDATA", "CSIDL_COMMON_APPDATA"]
        for id in ids:
            base = get_path(id)
            path = os.path.join(base, *resource)
            if os.path.exists(path):
                yield path
else:
    # Non-Windows platforms defer to the XDG base-directory spec (pyxdg).
    def save_config_path(*resource):
        import xdg.BaseDirectory
        return xdg.BaseDirectory.save_config_path(*resource)

    def load_config_paths(*resource):
        import xdg.BaseDirectory
        return xdg.BaseDirectory.load_config_paths(*resource)
|
||||
|
||||
|
||||
def matches_requirement(req, wheels):
    """List of wheels matching a requirement.

    :param req: The requirement to satisfy
    :param wheels: List of wheels to search.
    :raises RuntimeError: if pkg_resources is not importable.
    """
    try:
        from pkg_resources import Distribution, Requirement
    except ImportError:
        raise RuntimeError("Cannot use requirements without pkg_resources")

    requirement = Requirement.parse(req)

    selected = []
    for candidate in wheels:
        parsed = candidate.parsed_filename
        dist = Distribution(project_name=parsed.group("name"),
                            version=parsed.group("ver"))
        if dist in requirement:
            selected.append(candidate)
    return selected
|
219
projecten1/lib/python3.6/site-packages/wheel/wininst2wheel.py
Normal file
219
projecten1/lib/python3.6/site-packages/wheel/wininst2wheel.py
Normal file
@@ -0,0 +1,219 @@
|
||||
#!/usr/bin/env python
|
||||
import distutils.dist
|
||||
import os.path
|
||||
import re
|
||||
import sys
|
||||
import tempfile
|
||||
import zipfile
|
||||
from argparse import ArgumentParser
|
||||
from glob import iglob
|
||||
from shutil import rmtree
|
||||
|
||||
import wheel.bdist_wheel
|
||||
from wheel.archive import archive_wheelfile
|
||||
|
||||
egg_info_re = re.compile(r'''(^|/)(?P<name>[^/]+?)-(?P<ver>.+?)
|
||||
(-(?P<pyver>.+?))?(-(?P<arch>.+?))?.egg-info(/|$)''', re.VERBOSE)
|
||||
|
||||
|
||||
def parse_info(wininfo_name, egginfo_name):
    """Extract metadata from filenames.

    Extracts the 4 metadata items needed (name, version, pyversion, arch)
    from the installer filename (``name-ver.arch(-pyver).exe``) and from
    the name of the egg-info directory embedded in the zipfile, if any
    (``name-ver(-pyver)(-arch).egg-info``).

    The egg-info data, when present, is authoritative for name and
    version; the installer filename supplies pyver and arch. When no
    pyver component is present, 'py2.py3' is assumed, since the wininst
    format cannot record Python-version compatibility.

    :return: dict with keys 'name', 'ver', 'arch', 'pyver'.
    :raises ValueError: when either filename cannot be parsed.
    """
    egginfo = None
    if egginfo_name:
        egginfo = egg_info_re.search(egginfo_name)
        if not egginfo:
            raise ValueError("Egg info filename %s is not valid" % (egginfo_name,))

    # 1. Distribution name: everything before the first '-'.
    w_name, sep, rest = wininfo_name.partition('-')
    if not sep:
        raise ValueError("Installer filename %s is not valid" % (wininfo_name,))

    rest = rest[:-4]  # drop the '.exe' suffix
    # 2. Python version: the final '-' component, when it starts with 'py'.
    head, sep, tail = rest.rpartition('-')
    if sep and tail.startswith('py'):
        rest = head
        w_pyver = tail.replace('.', '')
    else:
        # Not version specific - assume the widest py2.py3 tag; the user
        # can always rename the wheel to be more restrictive if needed.
        w_pyver = 'py2.py3'
    # 3. Version and architecture split at the last '.'.
    w_ver, sep, w_arch = rest.rpartition('.')
    if not sep:
        raise ValueError("Installer filename %s is not valid" % (wininfo_name,))

    # Prefer the authoritative egg-info name/version when available.
    if egginfo:
        w_name = egginfo.group('name')
        w_ver = egginfo.group('ver')

    return {'name': w_name, 'ver': w_ver, 'arch': w_arch, 'pyver': w_pyver}
|
||||
|
||||
|
||||
def bdist_wininst2wheel(path, dest_dir=os.path.curdir):
    """Convert a bdist_wininst .exe installer at *path* into a wheel.

    The installer is opened as a zip archive, its members are re-rooted into
    wheel layout (purelib/platlib vs. ``<dist>.data/<key>/``), extracted to a
    temporary directory, turned into ``.dist-info`` metadata via
    ``bdist_wheel.egg2dist``, and finally archived as a ``.whl`` in *dest_dir*.
    """
    bdw = zipfile.ZipFile(path)

    # Search for egg-info in the archive; parse_info() uses it (when present)
    # as the authoritative source for the distribution name and version.
    egginfo_name = None
    for filename in bdw.namelist():
        if '.egg-info' in filename:
            egginfo_name = filename
            break

    info = parse_info(os.path.basename(path), egginfo_name)

    # The wheel root is pure unless the installer ships anything under
    # PLATLIB (platform-specific site-packages content).
    root_is_purelib = True
    for zipinfo in bdw.infolist():
        if zipinfo.filename.startswith('PLATLIB'):
            root_is_purelib = False
            break
    # Map the chosen top-level key to '' so its contents land at the wheel
    # root; every other key falls through to the .data/ directory below.
    if root_is_purelib:
        paths = {'purelib': ''}
    else:
        paths = {'platlib': ''}

    dist_info = "%(name)s-%(ver)s" % info
    datadir = "%s.data/" % dist_info

    # rewrite paths to trick ZipFile into extracting an egg
    # XXX grab wininst .ini - between .exe, padding, and first zip file.
    members = []
    egginfo_name = ''
    for zipinfo in bdw.infolist():
        # NOTE(review): assumes every member path has a top-level directory
        # component (e.g. "PURELIB/...", "SCRIPTS/..."); a bare filename
        # would make this split raise ValueError — confirm wininst layout.
        key, basename = zipinfo.filename.split('/', 1)
        key = key.lower()
        basepath = paths.get(key, None)
        if basepath is None:
            # Unrecognised top-level dirs (scripts, data, headers, ...) go
            # under the wheel's <dist>.data/<key>/ tree.
            basepath = datadir + key.lower() + '/'
        oldname = zipinfo.filename
        newname = basepath + basename
        zipinfo.filename = newname
        # Patch ZipFile's internal name index so extractall() resolves the
        # rewritten names (there is no public API for renaming members).
        del bdw.NameToInfo[oldname]
        bdw.NameToInfo[newname] = zipinfo
        # Collect member names, but omit '' (from an entry like "PLATLIB/").
        if newname:
            members.append(newname)
        # Remember egg-info name for the egg2dist call below
        if not egginfo_name:
            if newname.endswith('.egg-info'):
                egginfo_name = newname
            elif '.egg-info/' in newname:
                # Keep only the directory part: egg-info here is a directory
                # of metadata files, and egg2dist wants the directory path.
                egginfo_name, sep, _ = newname.rpartition('/')
    dir = tempfile.mkdtemp(suffix="_b2w")
    bdw.extractall(dir, members)

    # egg2wheel
    abi = 'none'
    pyver = info['pyver']
    arch = (info['arch'] or 'any').replace('.', '_').replace('-', '_')
    # Wininst installers always have arch even if they are not
    # architecture-specific (because the format itself is).
    # So, assume the content is architecture-neutral if root is purelib.
    if root_is_purelib:
        arch = 'any'
    # If the installer is architecture-specific, it's almost certainly also
    # CPython-specific.
    if arch != 'any':
        pyver = pyver.replace('py', 'cp')
    wheel_name = '-'.join((
        dist_info,
        pyver,
        abi,
        arch
    ))
    # Pure wheels can use the stock bdist_wheel; binary wheels need the
    # tag-overriding subclass so we can force the (pyver, abi, arch) triple.
    if root_is_purelib:
        bw = wheel.bdist_wheel.bdist_wheel(distutils.dist.Distribution())
    else:
        bw = _bdist_wheel_tag(distutils.dist.Distribution())

    bw.root_is_pure = root_is_purelib
    bw.python_tag = pyver
    bw.plat_name_supplied = True
    bw.plat_name = info['arch'] or 'any'

    if not root_is_purelib:
        bw.full_tag_supplied = True
        bw.full_tag = (pyver, abi, arch)

    # Convert the extracted egg-info into .dist-info, write WHEEL metadata
    # and the RECORD manifest, then zip everything up as the final wheel.
    dist_info_dir = os.path.join(dir, '%s.dist-info' % dist_info)
    bw.egg2dist(os.path.join(dir, egginfo_name), dist_info_dir)
    bw.write_wheelfile(dist_info_dir, generator='wininst2wheel')
    bw.write_record(dir, dist_info_dir)

    archive_wheelfile(os.path.join(dest_dir, wheel_name), dir)
    rmtree(dir)
|
||||
|
||||
|
||||
class _bdist_wheel_tag(wheel.bdist_wheel.bdist_wheel):
    """bdist_wheel variant whose generated wheel tag can be overridden.

    The stock bdist_wheel derives the python and abi tags from the
    interpreter that is running it, which is unsuitable when repackaging
    prebuilt binaries; this subclass lets the converter supply an explicit
    tag triple instead.
    """

    # Set full_tag_supplied = True and full_tag = (pytag, soabitag, plattag)
    # to force the tag; otherwise the parent's default behavior applies.
    full_tag_supplied = False
    full_tag = None

    def get_tag(self):
        override = self.full_tag if self.full_tag_supplied else None
        if override is None:
            return super(_bdist_wheel_tag, self).get_tag()
        return override
|
||||
|
||||
|
||||
def main():
    """CLI entry point: convert bdist_wininst installers (glob patterns) to wheels."""
    parser = ArgumentParser()
    parser.add_argument('installers', nargs='*', help="Installers to convert")
    parser.add_argument('--dest-dir', '-d', default=os.path.curdir,
                        help="Directory to store wheels (default %(default)s)")
    parser.add_argument('--verbose', '-v', action='store_true')
    args = parser.parse_args()

    for pattern in args.installers:
        # iglob lazily expands wildcard patterns into installer paths, so
        # patterns work even when the shell did not expand them.
        for exe in iglob(pattern):
            if args.verbose:
                print("{}... ".format(exe))
                sys.stdout.flush()

            bdist_wininst2wheel(exe, args.dest_dir)
            if args.verbose:
                print("OK")


if __name__ == "__main__":
    main()
|
Reference in New Issue
Block a user