from __future__ import print_function

import csv
import hashlib
import os.path
import re
import stat
import time
from collections import OrderedDict
from distutils import log as logger
from zipfile import ZIP_DEFLATED, ZipInfo, ZipFile

from wheel.cli import WheelError
from wheel.util import urlsafe_b64decode, as_unicode, native, urlsafe_b64encode, as_bytes, StringIO

# Non-greedy matching of an optional build number may be too clever (more
# invalid wheel filenames will match). Separate regex for .dist-info?
WHEEL_INFO_RE = re.compile(
    r"""^(?P<namever>(?P<name>.+?)-(?P<ver>.+?))(-(?P<build>\d[^-]*))?
    -(?P<pyver>.+?)-(?P<abi>.+?)-(?P<plat>.+?)\.whl$""",
    re.VERBOSE)
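
# Illustrative example (hypothetical filename): 'mypkg-1.0-1-py3-none-any.whl'
# parses as name='mypkg', ver='1.0', build='1', pyver='py3', abi='none',
# plat='any'; the '-1-' build-number group is optional.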


def get_zipinfo_datetime(timestamp=None):
    # Some applications need reproducible .whl files, but they can't do this without forcing
    # the timestamp of the individual ZipInfo objects. See issue #143.
    timestamp = int(os.environ.get('SOURCE_DATE_EPOCH', timestamp or time.time()))
    return time.gmtime(timestamp)[0:6]
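
# Illustrative example (assumed environment): with SOURCE_DATE_EPOCH=1577836800
# set, get_zipinfo_datetime() returns (2020, 1, 1, 0, 0, 0) no matter when the
# build runs, which makes repeated builds byte-for-byte reproducible.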


class WheelFile(ZipFile):
    """A ZipFile derivative class that also reads SHA-256 hashes from
    .dist-info/RECORD and checks any read files against those.
    """

    _default_algorithm = hashlib.sha256

    def __init__(self, file, mode='r', compression=ZIP_DEFLATED):
        basename = os.path.basename(file)
        self.parsed_filename = WHEEL_INFO_RE.match(basename)
        if not basename.endswith('.whl') or self.parsed_filename is None:
            raise WheelError("Bad wheel filename {!r}".format(basename))

        ZipFile.__init__(self, file, mode, compression=compression, allowZip64=True)

        self.dist_info_path = '{}.dist-info'.format(self.parsed_filename.group('namever'))
        self.record_path = self.dist_info_path + '/RECORD'
        self._file_hashes = OrderedDict()
        self._file_sizes = {}
        if mode == 'r':
            # Ignore RECORD and any embedded wheel signatures
            self._file_hashes[self.record_path] = None, None
            self._file_hashes[self.record_path + '.jws'] = None, None
            self._file_hashes[self.record_path + '.p7s'] = None, None

            # Fill in the expected hashes by reading them from RECORD
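            # (each RECORD row has the form "path,algorithm=urlsafe-b64-hash,size";
            # a hypothetical entry might read "mypkg/__init__.py,sha256=AbC...xyZ,342")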
            try:
                record = self.open(self.record_path)
            except KeyError:
                raise WheelError('Missing {} file'.format(self.record_path))

            with record:
                for line in record:
                    line = line.decode('utf-8')
                    path, hash_sum, size = line.rsplit(u',', 2)
                    if hash_sum:
                        algorithm, hash_sum = hash_sum.split(u'=')
                        try:
                            hashlib.new(algorithm)
                        except ValueError:
                            raise WheelError('Unsupported hash algorithm: {}'.format(algorithm))

                        if algorithm.lower() in {'md5', 'sha1'}:
                            raise WheelError(
                                'Weak hash algorithm ({}) is not permitted by PEP 427'
                                .format(algorithm))

                        self._file_hashes[path] = (
                            algorithm, urlsafe_b64decode(hash_sum.encode('ascii')))

    def open(self, name_or_info, mode="r", pwd=None):
        def _update_crc(newdata, eof=None):
            if eof is None:
                eof = ef._eof
                update_crc_orig(newdata)
            else:  # Python 2
                update_crc_orig(newdata, eof)

            running_hash.update(newdata)
            if eof and running_hash.digest() != expected_hash:
                raise WheelError("Hash mismatch for file '{}'".format(native(ef_name)))

        ef = ZipFile.open(self, name_or_info, mode, pwd)
        ef_name = as_unicode(name_or_info.filename if isinstance(name_or_info, ZipInfo)
                             else name_or_info)
        if mode == 'r' and not ef_name.endswith('/'):
            if ef_name not in self._file_hashes:
                raise WheelError("No hash found for file '{}'".format(native(ef_name)))

            algorithm, expected_hash = self._file_hashes[ef_name]
            if expected_hash is not None:
                # Monkey patch the _update_crc method to also check for the hash from RECORD
                running_hash = hashlib.new(algorithm)
                update_crc_orig, ef._update_crc = ef._update_crc, _update_crc

        return ef
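
    # Illustrative read-side usage (hypothetical filename): every entry opened in
    # 'r' mode is streamed through the patched _update_crc, so a tampered file
    # raises WheelError once the end of the entry is reached:
    #
    #     with WheelFile('mypkg-1.0-py3-none-any.whl') as wf:
    #         payload = wf.read('mypkg/__init__.py')  # hash-checked against RECORD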

    def write_files(self, base_dir):
        logger.info("creating '%s' and adding '%s' to it", self.filename, base_dir)
        deferred = []
        for root, dirnames, filenames in os.walk(base_dir):
            # Sort the directory names so that `os.walk` will walk them in a
            # defined order on the next iteration.
            dirnames.sort()
            for name in sorted(filenames):
                path = os.path.normpath(os.path.join(root, name))
                if os.path.isfile(path):
                    arcname = os.path.relpath(path, base_dir).replace(os.path.sep, '/')
                    if arcname == self.record_path:
                        pass
                    elif root.endswith('.dist-info'):
                        deferred.append((path, arcname))
                    else:
                        self.write(path, arcname)

        deferred.sort()
        for path, arcname in deferred:
            self.write(path, arcname)
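
    # Deferring the .dist-info files until the end keeps the metadata directory at
    # the back of the archive, the layout recommended for wheels (PEP 427).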

    def write(self, filename, arcname=None, compress_type=None):
        with open(filename, 'rb') as f:
            st = os.fstat(f.fileno())
            data = f.read()

        zinfo = ZipInfo(arcname or filename, date_time=get_zipinfo_datetime(st.st_mtime))
        zinfo.external_attr = (stat.S_IMODE(st.st_mode) | stat.S_IFMT(st.st_mode)) << 16
        zinfo.compress_type = compress_type or self.compression
        self.writestr(zinfo, data, compress_type)

    def writestr(self, zinfo_or_arcname, bytes, compress_type=None):
        ZipFile.writestr(self, zinfo_or_arcname, bytes, compress_type)
        fname = (zinfo_or_arcname.filename if isinstance(zinfo_or_arcname, ZipInfo)
                 else zinfo_or_arcname)
        logger.info("adding '%s'", fname)
        if fname != self.record_path:
            hash_ = self._default_algorithm(bytes)
            self._file_hashes[fname] = hash_.name, native(urlsafe_b64encode(hash_.digest()))
            self._file_sizes[fname] = len(bytes)

    def close(self):
        # Write RECORD
        if self.fp is not None and self.mode == 'w' and self._file_hashes:
            data = StringIO()
            writer = csv.writer(data, delimiter=',', quotechar='"', lineterminator='\n')
            writer.writerows((
                (
                    fname,
                    algorithm + "=" + hash_,
                    self._file_sizes[fname]
                )
                for fname, (algorithm, hash_) in self._file_hashes.items()
            ))
            writer.writerow((format(self.record_path), "", ""))
            zinfo = ZipInfo(native(self.record_path), date_time=get_zipinfo_datetime())
            zinfo.compress_type = self.compression
            zinfo.external_attr = 0o664 << 16
            self.writestr(zinfo, as_bytes(data.getvalue()))

        ZipFile.close(self)
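
# Illustrative write-side usage (hypothetical paths): entries added via
# write_files() are hashed as they are written, and close() appends the generated
# RECORD before the archive is finalized:
#
#     with WheelFile('dist/mypkg-1.0-py3-none-any.whl', 'w') as wf:
#         wf.write_files('build/staging')  # RECORD is emitted on close()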