diff --git a/swh/loader/package/debian.py b/swh/loader/package/debian.py
index cc8d9f5..b472505 100644
--- a/swh/loader/package/debian.py
+++ b/swh/loader/package/debian.py
@@ -1,356 +1,356 @@
# Copyright (C) 2017-2019 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information

import email.utils
import iso8601
import logging
import re
import subprocess

from dateutil.parser import parse as parse_date
from debian.changelog import Changelog
from debian.deb822 import Dsc
from os import path
from typing import (
    Any, Dict, Generator, List, Mapping, Optional, Sequence, Tuple
)

from swh.loader.package.loader import PackageLoader
-from swh.loader.package.utils import download
+from swh.loader.package.utils import download, release_name

logger = logging.getLogger(__name__)

UPLOADERS_SPLIT = re.compile(r'(?<=\>)\s*,\s*')


class DebianLoader(PackageLoader):
    """Load debian origins into swh archive.

    """
    visit_type = 'debian'

    def __init__(self, url: str, date: str, packages: Mapping[str, Any]):
        super().__init__(url=url)
        self.packages = packages

    def get_versions(self) -> Sequence[str]:
        """Returns the keys of the packages input (e.g.
        stretch/contrib/0.7.2-3, etc...)

        """
        return self.packages.keys()

    def get_default_version(self) -> str:
        """Take the first version as default release

        """
        return list(self.packages.keys())[0]

    def get_package_info(self, version: str) -> Generator[
            Tuple[str, Mapping[str, Any]], None, None]:
        meta = self.packages[version]
        p_info = meta.copy()
        p_info['raw'] = meta
-        yield 'releases/%s' % version, p_info
+        yield release_name(version), p_info

    def resolve_revision_from(
            self, known_package_artifacts: Dict, artifact_metadata: Dict) \
            -> Optional[bytes]:
        artifacts_to_fetch = artifact_metadata['files']
        logger.debug('k_p_artifacts: %s', known_package_artifacts)
        logger.debug('artifacts_to_fetch: %s', artifacts_to_fetch)
        for rev_id, known_artifacts in known_package_artifacts.items():
            logger.debug('Revision: %s', rev_id)
            logger.debug('Associated known_artifacts: %s', known_artifacts)
            known_artifacts = known_artifacts['extrinsic']['raw']['files']
            rev_found = True
            for a_name, k_artifact in known_artifacts.items():
                artifact_to_fetch = artifacts_to_fetch.get(a_name)
                logger.debug('artifact_to_fetch: %s', artifact_to_fetch)
                if artifact_to_fetch is None:
                    # as soon as we do not see an artifact, we consider we
                    # need to check the other revision
                    rev_found = False
                    continue
                if k_artifact['sha256'] != artifact_to_fetch['sha256']:
                    # Hash is different, we consider we need to check the
                    # other revisions
                    rev_found = False
            if rev_found:
                logger.debug('Existing revision %s found for new artifacts.',
                             rev_id)
                return rev_id
        # if we pass here, we did not find any known artifacts
        logger.debug('No existing revision found for the new artifacts.')

    def download_package(self, p_info: Mapping[str, Any],
                         tmpdir: str) -> List[Tuple[str, Dict]]:
        """Contrary to other package loaders (1 package, 1 artifact),
        `a_metadata` represents the package's set of data files to fetch:
        - .orig.tar.gz
        - .dsc
        - .diff.gz

        This is delegated to the `download_package` function.

        """
        all_hashes = download_package(p_info, tmpdir)
        logger.debug('all_hashes: %s', all_hashes)
        res = []
        for hashes in all_hashes.values():
            res.append((tmpdir, hashes))
        logger.debug('res: %s', res)
        return res

    def uncompress(self, dl_artifacts: List[Tuple[str, Dict]],
                   dest: str) -> str:
        logger.debug('dl_artifacts: %s', dl_artifacts)
        return extract_package(dl_artifacts, dest=dest)

    def build_revision(self, a_metadata: Mapping[str, Any],
                       uncompressed_path: str) -> Dict:
        dsc_url, dsc_name = dsc_information(a_metadata)
        dsc_path = path.join(path.dirname(uncompressed_path), dsc_name)
        i_metadata = get_package_metadata(
            a_metadata, dsc_path, uncompressed_path)

        logger.debug('i_metadata: %s', i_metadata)
        logger.debug('a_metadata: %s', a_metadata)

        msg = 'Synthetic revision for Debian source package %s version %s' % (
            a_metadata['name'], a_metadata['version'])

        date = iso8601.parse_date(i_metadata['changelog']['date'])
        author = prepare_person(i_metadata['changelog']['person'])

        # inspired from swh.loader.debian.converters.package_metadata_to_revision  # noqa
        return {
            'type': 'dsc',
            'message': msg.encode('utf-8'),
            'author': author,
            'date': date,
            'committer': author,
            'committer_date': date,
            'parents': [],
            'metadata': {
                'intrinsic': {
                    'tool': 'dsc',
                    'raw': i_metadata,
                },
                'extrinsic': {
                    'provider': dsc_url,
                    'when': self.visit_date.isoformat(),
                    'raw': a_metadata,
                },
            }
        }


def uid_to_person(uid: str) -> Mapping[str, str]:
    """Convert an uid to a person suitable for insertion.

    Args:
        uid: an uid of the form "Name <email@address>"

    Returns:
        a dictionary with the following keys:

        - name: the name associated to the uid
        - email: the mail associated to the uid
        - fullname: the actual uid input

    """
    logger.debug('uid: %s', uid)
    ret = {
        'name': '',
        'email': '',
        'fullname': uid,
    }

    name, mail = email.utils.parseaddr(uid)
    if name and mail:
        ret['name'] = name
        ret['email'] = mail
    else:
        ret['name'] = uid
    return ret


def prepare_person(person: Mapping[str, str]) -> Mapping[str, bytes]:
    """Prepare person for swh serialization...

    Args:
        A person dict

    Returns:
        A person dict ready for storage

    """
    ret = {}
    for key, value in person.items():
        ret[key] = value.encode('utf-8')
    return ret


def download_package(
        package: Mapping[str, Any], tmpdir: Any) -> Mapping[str, Any]:
    """Fetch a source package in a temporary directory and check the
    checksums for all files.

    Args:
        package: Dict defining the set of files representing a debian package
        tmpdir: Where to download and extract the files to ingest

    Returns:
        Dict of swh hashes per filename key

    """
    all_hashes = {}
    for filename, fileinfo in package['files'].items():
        uri = fileinfo['uri']
        logger.debug('fileinfo: %s', fileinfo)
        extrinsic_hashes = {'sha256': fileinfo['sha256']}
        logger.debug('extrinsic_hashes(%s): %s', filename, extrinsic_hashes)
        filepath, hashes = download(uri, dest=tmpdir, filename=filename,
                                    hashes=extrinsic_hashes)
        all_hashes[filename] = hashes

    logger.debug('all_hashes: %s', all_hashes)
    return all_hashes
def dsc_information(package: Mapping[str, Any]) -> Tuple[str, str]:
    """Retrieve dsc information from a package.

    Args:
        package: Package metadata information

    Returns:
        Tuple of the dsc file's uri and the dsc file's name

    """
    dsc_name = None
    dsc_url = None
    for filename, fileinfo in package['files'].items():
        if filename.endswith('.dsc'):
            if dsc_name:
                raise ValueError(
                    'Package %s_%s references several dsc files' %
                    (package['name'], package['version'])
                )
            dsc_url = fileinfo['uri']
            dsc_name = filename

    return dsc_url, dsc_name


def extract_package(dl_artifacts: List[Tuple[str, Dict]], dest: str) -> str:
    """Extract a Debian source package to a given directory.

    Note that after extraction the target directory will be the root of the
    extracted package, rather than containing it.

    Args:
        dl_artifacts: list of (path, hashes) tuples for the downloaded
            package files
        dest: directory where the package files are stored

    Returns:
        Package extraction directory

    """
    a_path = dl_artifacts[0][0]
    logger.debug('dl_artifacts: %s', dl_artifacts)
    for _, hashes in dl_artifacts:
        logger.debug('hashes: %s', hashes)
        filename = hashes['filename']
        if filename.endswith('.dsc'):
            dsc_name = filename
            break

    dsc_path = path.join(a_path, dsc_name)
    destdir = path.join(dest, 'extracted')
    logfile = path.join(dest, 'extract.log')
    logger.debug('extract Debian source package %s in %s' %
                 (dsc_path, destdir), extra={
                     'swh_type': 'deb_extract',
                     'swh_dsc': dsc_path,
                     'swh_destdir': destdir,
                 })

    cmd = ['dpkg-source',
           '--no-copy', '--no-check',
           '--ignore-bad-version',
           '-x', dsc_path,
           destdir]

    try:
        with open(logfile, 'w') as stdout:
            subprocess.check_call(cmd, stdout=stdout,
                                  stderr=subprocess.STDOUT)
    except subprocess.CalledProcessError as e:
        logdata = open(logfile, 'r').read()
        raise ValueError('dpkg-source exited with code %s: %s' %
                         (e.returncode, logdata)) from None

    return destdir


def get_package_metadata(package: Mapping[str, Any], dsc_path: str,
                         extracted_path: str) -> Mapping[str, Any]:
    """Get the package metadata from the source package at dsc_path,
    extracted in extracted_path.

    Args:
        package: the package dict (with a dsc_path key)
        dsc_path: path to the package's dsc file
        extracted_path: the path where the package got extracted

    Returns:
        dict: a dictionary with the following keys:

        - history: list of (package_name, package_version) tuples parsed from
          the package changelog

    """
    with open(dsc_path, 'rb') as dsc:
        parsed_dsc = Dsc(dsc)

    # Parse the changelog to retrieve the rest of the package information
    changelog_path = path.join(extracted_path, 'debian/changelog')
    with open(changelog_path, 'rb') as changelog:
        try:
            parsed_changelog = Changelog(changelog)
        except UnicodeDecodeError:
            logger.warning('Unknown encoding for changelog %s,'
                           ' falling back to iso' % changelog_path,
                           extra={
                               'swh_type': 'deb_changelog_encoding',
                               'swh_name': package['name'],
                               'swh_version': str(package['version']),
                               'swh_changelog': changelog_path,
                           })

            # need to reset as Changelog scrolls to the end of the file
            changelog.seek(0)
            parsed_changelog = Changelog(changelog, encoding='iso-8859-15')

    package_info = {
        'name': package['name'],
        'version': str(package['version']),
        'changelog': {
            'person': uid_to_person(parsed_changelog.author),
            'date': parse_date(parsed_changelog.date).isoformat(),
            'history': [(block.package, str(block.version))
                        for block in parsed_changelog][1:],
        }
    }

    maintainers = [
        uid_to_person(parsed_dsc['Maintainer']),
    ]
    maintainers.extend(
        uid_to_person(person)
        for person in UPLOADERS_SPLIT.split(parsed_dsc.get('Uploaders', ''))
    )
    package_info['maintainers'] = maintainers

    return package_info
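For context, the DebianLoader above consumes a `packages` mapping keyed by version. The shape sketched below is only an illustration inferred from the code (URIs and digests are made up); it shows the per-file `uri`/`sha256` entries that `download_package` and `resolve_revision_from` rely on:

    packages = {
        'stretch/contrib/0.7.2-3': {
            'name': 'cicero',
            'version': '0.7.2-3',
            'files': {
                'cicero_0.7.2-3.dsc': {
                    'uri': 'https://deb.example.org/pool/cicero_0.7.2-3.dsc',
                    'sha256': '<hex digest>',
                },
                'cicero_0.7.2.orig.tar.gz': {
                    'uri': 'https://deb.example.org/pool/cicero_0.7.2.orig.tar.gz',
                    'sha256': '<hex digest>',
                },
            },
        },
    }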
diff --git a/swh/loader/package/gnu.py b/swh/loader/package/gnu.py
index 4d6d2a1..63d73d2 100644
--- a/swh/loader/package/gnu.py
+++ b/swh/loader/package/gnu.py
@@ -1,195 +1,196 @@
# Copyright (C) 2019 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information

import logging
import re

from os import path
from typing import Any, Dict, Generator, Mapping, Optional, Sequence, Tuple

from swh.loader.package.loader import PackageLoader
+from swh.loader.package.utils import release_name
from swh.model.identifiers import normalize_timestamp

logger = logging.getLogger(__name__)

# to recognize existing naming pattern
extensions = [
    'zip', 'tar',
    'gz', 'tgz',
    'bz2', 'bzip2',
    'lzma', 'lz',
    'xz', 'Z',
]

version_keywords = [
    'cygwin_me', 'w32', 'win32', 'nt', 'cygwin', 'mingw',
    'latest', 'alpha', 'beta', 'release', 'stable',
    'hppa', 'solaris', 'sunos', 'sun4u', 'sparc', 'sun',
    'aix', 'ibm', 'rs6000',
    'i386', 'i686',
    'linux', 'redhat', 'linuxlibc',
    'mips',
    'powerpc', 'macos', 'apple', 'darwin', 'macosx', 'powermacintosh',
    'unknown',
    'netbsd', 'freebsd',
    'sgi', 'irix',
]

# Match a filename into components.
#
# We use Debian's release number heuristic: A release number starts
# with a digit, and is followed by alphanumeric characters or any of
# ., +, :, ~ and -
#
# We hardcode a list of possible extensions, as this release number
# scheme would match them too... We match on any combination of those.
#
# Greedy matching is done right to left (we only match the extension
# greedily with +, software_name and release_number are matched lazily
# with +? and *?).

pattern = r'''
^
(?:
    # We have a software name and a release number, separated with a
    # -, _ or dot.
    (?P<software_name1>.+?[-_.])
    (?P<release_number>(%(vkeywords)s|[0-9][0-9a-zA-Z_.+:~-]*?)+)
|
    # We couldn't match a release number, put everything in the
    # software name.
    (?P<software_name2>.+?)
)
(?P<extension>(?:\.(?:%(extensions)s))+)
$
''' % {
    'extensions': '|'.join(extensions),
    'vkeywords': '|'.join('%s[-]?' % k for k in version_keywords),
}


def get_version(url: str) -> str:
    """Extract branch name from tarball url

    Args:
        url (str): Tarball URL

    Returns:
        str: Branch name

    Example:
        For url = https://ftp.gnu.org/gnu/8sync/8sync-0.2.0.tar.gz

        >>> get_version(url)
        '0.2.0'

    """
    filename = path.split(url)[-1]
    m = re.match(pattern, filename,
                 flags=re.VERBOSE | re.IGNORECASE)
    if m:
        d = m.groupdict()
        if d['software_name1'] and d['release_number']:
            return d['release_number']
        if d['software_name2']:
            return d['software_name2']

    return ''


class GNULoader(PackageLoader):
    visit_type = 'gnu'
    SWH_PERSON = {
        'name': b'Software Heritage',
        'fullname': b'Software Heritage',
        'email': b'robot@softwareheritage.org'
    }
    REVISION_MESSAGE = b'swh-loader-package: synthetic revision message'

    def __init__(self, package_url: str, tarballs: Sequence):
        """Loader constructor.

        For now, this is the lister's task output.

        Args:
            package_url: Origin url
            tarballs: List of dicts with keys `time` (timestamp) and
                `archive` (str), the url to retrieve one versioned archive

        """
        super().__init__(url=package_url)
        self.tarballs = list(sorted(tarballs, key=lambda v: v['time']))

    def get_versions(self) -> Sequence[str]:
        versions = []
        for archive in self.tarballs:
            v = get_version(archive['archive'])
            if v:
                versions.append(v)
        return versions

    def get_default_version(self) -> str:
        # It's the most recent, so for this loader, it's the last one
        return get_version(self.tarballs[-1]['archive'])

    def get_package_info(self, version: str) -> Generator[
            Tuple[str, Mapping[str, Any]], None, None]:
        for a_metadata in self.tarballs:
            url = a_metadata['archive']
            package_version = get_version(url)
            if version == package_version:
                p_info = {
                    'url': url,
                    'filename': path.split(url)[-1],
                    'raw': a_metadata,
                }
                # FIXME: this code assumes we have only 1 artifact per
                # versioned package
-                yield 'releases/%s' % version, p_info
+                yield release_name(version), p_info

    def resolve_revision_from(
            self, known_artifacts: Dict, artifact_metadata: Dict) \
            -> Optional[bytes]:
        def pk(d):
            return [d.get(k) for k in ['time', 'archive', 'length']]

        artifact_pk = pk(artifact_metadata)
        for rev_id, known_artifact in known_artifacts.items():
            logging.debug('known_artifact: %s', known_artifact)
            known_pk = pk(known_artifact['extrinsic']['raw'])
            if artifact_pk == known_pk:
                return rev_id

    def build_revision(
            self, a_metadata: Mapping[str, Any],
            uncompressed_path: str) -> Dict:
        normalized_date = normalize_timestamp(int(a_metadata['time']))
        return {
            'type': 'tar',
            'message': self.REVISION_MESSAGE,
            'date': normalized_date,
            'author': self.SWH_PERSON,
            'committer': self.SWH_PERSON,
            'committer_date': normalized_date,
            'parents': [],
            'metadata': {
                'intrinsic': {},
                'extrinsic': {
                    'provider': self.url,
                    'when': self.visit_date.isoformat(),
                    'raw': a_metadata,
                },
            },
        }
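A short sketch of how the GNU filename heuristic and the new helper fit together; the first call mirrors the doctest above, the second simply applies `release_name` to its result (module paths assume the layout in this diff):

    from swh.loader.package.gnu import get_version
    from swh.loader.package.utils import release_name

    v = get_version('https://ftp.gnu.org/gnu/8sync/8sync-0.2.0.tar.gz')  # '0.2.0'
    release_name(v)                                                      # 'releases/0.2.0'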
diff --git a/swh/loader/package/npm.py b/swh/loader/package/npm.py
index db865cd..678b8ab 100644
--- a/swh/loader/package/npm.py
+++ b/swh/loader/package/npm.py
@@ -1,295 +1,295 @@
# Copyright (C) 2019 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information

import json
import logging
import os
import re

from codecs import BOM_UTF8
from typing import Any, Dict, Generator, Mapping, Sequence, Tuple, Optional

import chardet
import iso8601

from swh.model.identifiers import normalize_timestamp
from swh.loader.package.loader import PackageLoader
-from swh.loader.package.utils import api_info
+from swh.loader.package.utils import api_info, release_name

logger = logging.getLogger(__name__)


_EMPTY_AUTHOR = {'fullname': b'', 'name': None, 'email': None}

# https://github.com/jonschlinkert/author-regex
_author_regexp = r'([^<(]+?)?[ \t]*(?:<([^>(]+?)>)?[ \t]*(?:\(([^)]+?)\)|$)'


def parse_npm_package_author(author_str):
    """
    Parse npm package author string.

    It works with a flexible range of formats: a bare name, a name followed
    by an email in angle brackets and/or a url in parentheses (in either
    order, with or without separating spaces), or an email or url alone.

    Args:
        author_str (str): input author string

    Returns:
        dict: A dict that may contain the following keys:
            * name
            * email
            * url

    """
    author = {}
    matches = re.findall(_author_regexp,
                         author_str.replace('<>', '').replace('()', ''),
                         re.M)
    for match in matches:
        if match[0].strip():
            author['name'] = match[0].strip()
        if match[1].strip():
            author['email'] = match[1].strip()
        if match[2].strip():
            author['url'] = match[2].strip()
    return author


def extract_npm_package_author(package_json):
    """
    Extract package author from a ``package.json`` file content and
    return it in swh format.

    Args:
        package_json (dict): Dict holding the content of parsed
            ``package.json`` file

    Returns:
        dict: A dict with the following keys:
            * fullname
            * name
            * email

    """
    def _author_str(author_data):
        if type(author_data) is dict:
            author_str = ''
            if 'name' in author_data:
                author_str += author_data['name']
            if 'email' in author_data:
                author_str += ' <%s>' % author_data['email']
            return author_str
        elif type(author_data) is list:
            return _author_str(author_data[0]) if len(author_data) > 0 else ''
        else:
            return author_data

    author_data = {}
    for author_key in ('author', 'authors'):
        if author_key in package_json:
            author_str = _author_str(package_json[author_key])
            author_data = parse_npm_package_author(author_str)

    name = author_data.get('name')
    email = author_data.get('email')

    fullname = None

    if name and email:
        fullname = '%s <%s>' % (name, email)
    elif name:
        fullname = name

    if not fullname:
        return _EMPTY_AUTHOR

    if fullname:
        fullname = fullname.encode('utf-8')

    if name:
        name = name.encode('utf-8')

    if email:
        email = email.encode('utf-8')

    return {'fullname': fullname, 'name': name, 'email': email}


def _lstrip_bom(s, bom=BOM_UTF8):
    if s.startswith(bom):
        return s[len(bom):]
    else:
        return s


def load_json(json_bytes):
    """
    Try to load JSON from bytes and return a dictionary.

    First try to decode from utf-8. If the decoding failed,
    try to detect the encoding and decode again with replace
    error handling.

    If JSON is malformed, an empty dictionary will be returned.

    Args:
        json_bytes (bytes): binary content of a JSON file

    Returns:
        dict: JSON data loaded in a dictionary

    """
    json_data = {}
    try:
        json_str = _lstrip_bom(json_bytes).decode('utf-8')
    except UnicodeDecodeError:
        encoding = chardet.detect(json_bytes)['encoding']
        if encoding:
            json_str = json_bytes.decode(encoding, 'replace')
    try:
        json_data = json.loads(json_str)
    except json.decoder.JSONDecodeError:
        pass
    return json_data
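# Rough illustration (made-up author strings) of the two author helpers above:
#
#   parse_npm_package_author('Jane Doe <jane@example.org> (https://example.org)')
#   # -> {'name': 'Jane Doe', 'email': 'jane@example.org',
#   #     'url': 'https://example.org'}
#   extract_npm_package_author({'author': 'Jane Doe <jane@example.org>'})
#   # -> {'fullname': b'Jane Doe <jane@example.org>',
#   #     'name': b'Jane Doe', 'email': b'jane@example.org'}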
def extract_intrinsic_metadata(dir_path: str) -> Dict:
    """Given an uncompressed path holding the ``package.json`` file, returns
       its parsed structure as a dict.

    The release artifact contains at its root one folder. For example:

    $ tar tvf zprint-0.0.6.tar.gz
    drwxr-xr-x root/root 0 2018-08-22 11:01 zprint-0.0.6/
    ...

    Args:
        dir_path (str): Path to the uncompressed directory
            representing a release artifact from npm.

    Returns:
        the ``package.json`` parsed structure as a dict, or an empty dict
        if it could not be found or read.

    """
    # Retrieve the root folder of the archive
    if not os.path.exists(dir_path):
        return {}
    lst = os.listdir(dir_path)
    if len(lst) == 0:
        return {}
    project_dirname = lst[0]
    package_json_path = os.path.join(dir_path, project_dirname,
                                     'package.json')
    if not os.path.exists(package_json_path):
        return {}
    with open(package_json_path, 'rb') as package_json_file:
        package_json_bytes = package_json_file.read()
        return load_json(package_json_bytes)


class NpmLoader(PackageLoader):
    visit_type = 'npm'

    def __init__(self, package_name, package_url, package_metadata_url):
        super().__init__(url=package_url)
        self.provider_url = package_metadata_url

        self._info = None
        self._versions = None

        # if package_url is None:
        #     package_url = 'https://www.npmjs.com/package/%s' % package_name
        # if package_metadata_url is None:
        #     package_metadata_url = 'https://replicate.npmjs.com/%s/' %\
        #         quote(package_name, safe='')

    @property
    def info(self) -> Dict:
        """Return the project metadata information (fetched from npm registry)

        """
        if not self._info:
            self._info = api_info(self.provider_url)
        return self._info

    def get_versions(self) -> Sequence[str]:
        return sorted(list(self.info['versions'].keys()))

    def get_default_version(self) -> str:
        return self.info['dist-tags'].get('latest', '')

    def get_package_info(self, version: str) -> Generator[
            Tuple[str, Mapping[str, Any]], None, None]:
        meta = self.info['versions'][version]
        url = meta['dist']['tarball']
        p_info = {
            'url': url,
            'filename': os.path.basename(url),
            'raw': meta,
        }
-        yield 'releases/%s' % version, p_info
+        yield release_name(version), p_info

    def resolve_revision_from(
            self, known_artifacts: Dict, artifact_metadata: Dict) \
            -> Optional[bytes]:
        shasum = artifact_metadata['dist']['shasum']
        for rev_id, known_artifact in known_artifacts.items():
            original_artifact = known_artifact['original_artifact'][0]
            if shasum == original_artifact['checksums']['sha1']:
                return rev_id

    def build_revision(
            self, a_metadata: Dict, uncompressed_path: str) -> Dict:
        i_metadata = extract_intrinsic_metadata(uncompressed_path)

        # from intrinsic metadata
        author = extract_npm_package_author(i_metadata)
        # extrinsic metadata
        version = i_metadata['version']
        date = self.info['time'][version]
        date = iso8601.parse_date(date)
        date = normalize_timestamp(int(date.timestamp()))
        message = version.encode('ascii')

        return {
            'type': 'tar',
            'message': message,
            'author': author,
            'date': date,
            'committer': author,
            'committer_date': date,
            'parents': [],
            'metadata': {
                'intrinsic': {
                    'tool': 'package.json',
                    'raw': i_metadata,
                },
                'extrinsic': {
                    'provider': self.provider_url,
                    'when': self.visit_date.isoformat(),
                    'raw': a_metadata,
                },
            },
        }
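To make the branch naming concrete: for each version listed by the npm registry, `NpmLoader.get_package_info` above yields one `(branch_name, p_info)` pair built from the registry's per-version entry. The payload below is a rough, invented illustration of the fields the loader reads, not an actual registry response:

    meta = {
        'version': '1.3.0',
        'dist': {
            'tarball': 'https://registry.example.org/left-pad/-/left-pad-1.3.0.tgz',
            'shasum': '<hex sha1>',
        },
    }
    # branch name yielded for this version:
    release_name('1.3.0')   # 'releases/1.3.0'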
diff --git a/swh/loader/package/pypi.py b/swh/loader/package/pypi.py
index c0acb7e..87fea96 100644
--- a/swh/loader/package/pypi.py
+++ b/swh/loader/package/pypi.py
@@ -1,193 +1,193 @@
# Copyright (C) 2019 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information

import os

from typing import Any, Dict, Generator, Mapping, Optional, Sequence, Tuple
from urllib.parse import urlparse

from pkginfo import UnpackedSDist
import iso8601

from swh.model.identifiers import normalize_timestamp
from swh.loader.package.loader import PackageLoader
-from swh.loader.package.utils import api_info
+from swh.loader.package.utils import api_info, release_name


def pypi_api_url(url: str) -> str:
    """Compute the api url from a project url.

    Args:
        url (str): PyPI instance's url (e.g: https://pypi.org/project/requests)

    Returns:
        The project's api url (e.g: https://pypi.org/pypi/requests/json)

    """
    p_url = urlparse(url)
    project_name = p_url.path.split('/')[-1]
    url = '%s://%s/pypi/%s/json' % (p_url.scheme, p_url.netloc, project_name)
    return url


def extract_intrinsic_metadata(dir_path: str) -> Dict:
    """Given an uncompressed path holding the pkginfo file, returns a
       pkginfo parsed structure as a dict.

    The release artifact contains at its root one folder. For example:

    $ tar tvf zprint-0.0.6.tar.gz
    drwxr-xr-x root/root 0 2018-08-22 11:01 zprint-0.0.6/
    ...

    Args:
        dir_path (str): Path to the uncompressed directory
            representing a release artifact from pypi.

    Returns:
        the pkginfo parsed structure as a dict, or an empty dict if it
        could not be found.

    """
    # Retrieve the root folder of the archive
    if not os.path.exists(dir_path):
        return {}
    lst = os.listdir(dir_path)
    if len(lst) != 1:
        return {}
    project_dirname = lst[0]
    pkginfo_path = os.path.join(dir_path, project_dirname, 'PKG-INFO')
    if not os.path.exists(pkginfo_path):
        return {}
    pkginfo = UnpackedSDist(pkginfo_path)
    raw = pkginfo.__dict__
    raw.pop('filename')  # this gets added with the ondisk location
    return raw


def author(data: Dict) -> Dict:
    """Given a dict of project/release artifact information (coming from
       PyPI), returns an author subset.

    Args:
        data (dict): Representing either artifact information or
                     release information.

    Returns:
        swh-model dict representing a person.

    """
    name = data.get('author')
    email = data.get('author_email')

    if email:
        fullname = '%s <%s>' % (name, email)
    else:
        fullname = name

    if not fullname:
        return {'fullname': b'', 'name': None, 'email': None}

    fullname = fullname.encode('utf-8')

    if name is not None:
        name = name.encode('utf-8')

    if email is not None:
        email = email.encode('utf-8')

    return {'fullname': fullname, 'name': name, 'email': email}
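# Quick sanity examples for the two helpers above (inputs are illustrative):
#
#   pypi_api_url('https://pypi.org/project/requests')
#   # -> 'https://pypi.org/pypi/requests/json'
#   author({'author': 'Jane Doe', 'author_email': 'jane@example.org'})
#   # -> {'fullname': b'Jane Doe <jane@example.org>',
#   #     'name': b'Jane Doe', 'email': b'jane@example.org'}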
""" visit_type = 'pypi' def __init__(self, url): super().__init__(url=url) self._info = None self.provider_url = pypi_api_url(self.url) @property def info(self) -> Dict: """Return the project metadata information (fetched from pypi registry) """ if not self._info: self._info = api_info(self.provider_url) return self._info def get_versions(self) -> Sequence[str]: return self.info['releases'].keys() def get_default_version(self) -> str: return self.info['info']['version'] def get_package_info(self, version: str) -> Generator[ Tuple[str, Mapping[str, Any]], None, None]: res = [] for meta in self.info['releases'][version]: filename = meta['filename'] p_info = { 'url': meta['url'], 'filename': filename, 'raw': meta, } res.append((version, p_info)) if len(res) == 1: version, p_info = res[0] - yield 'releases/%s' % version, p_info + yield release_name(version), p_info else: for version, p_info in res: - yield 'releases/%s/%s' % (version, p_info['filename']), p_info + yield release_name(version, p_info['filename']), p_info def resolve_revision_from( self, known_artifacts: Dict, artifact_metadata: Dict) \ -> Optional[bytes]: sha256 = artifact_metadata['digests']['sha256'] for rev_id, known_artifact in known_artifacts.items(): for original_artifact in known_artifact['original_artifact']: if sha256 == original_artifact['checksums']['sha256']: return rev_id def build_revision( self, a_metadata: Dict, uncompressed_path: str) -> Dict: i_metadata = extract_intrinsic_metadata(uncompressed_path) # from intrinsic metadata name = i_metadata['version'] _author = author(i_metadata) # from extrinsic metadata message = a_metadata.get('comment_text', '') message = '%s: %s' % (name, message) if message else name date = normalize_timestamp( int(iso8601.parse_date(a_metadata['upload_time']).timestamp())) return { 'type': 'tar', 'message': message.encode('utf-8'), 'author': _author, 'date': date, 'committer': _author, 'committer_date': date, 'parents': [], 'metadata': { 'intrinsic': { 'tool': 'PKG-INFO', 'raw': i_metadata, }, 'extrinsic': { 'provider': self.provider_url, 'when': self.visit_date.isoformat(), 'raw': a_metadata, }, } } diff --git a/swh/loader/package/tests/test_utils.py b/swh/loader/package/tests/test_utils.py index 19cc690..3e2d17f 100644 --- a/swh/loader/package/tests/test_utils.py +++ b/swh/loader/package/tests/test_utils.py @@ -1,146 +1,153 @@ # Copyright (C) 2019 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import os import pytest -from swh.loader.package.utils import download, api_info +from swh.loader.package.utils import download, api_info, release_name @pytest.mark.fs def test_download_fail_to_download(tmp_path, requests_mock): url = 'https://pypi.org/pypi/arrow/json' status_code = 404 requests_mock.get(url, status_code=status_code) with pytest.raises(ValueError) as e: download(url, tmp_path) assert e.value.args[0] == "Fail to query '%s'. Reason: %s" % ( url, status_code) @pytest.mark.fs def test_download_fail_length_mismatch(tmp_path, requests_mock): """Mismatch length after download should raise """ filename = 'requests-0.0.1.tar.gz' url = 'https://pypi.org/pypi/requests/%s' % filename data = 'this is something' wrong_size = len(data) - 3 requests_mock.get(url, text=data, headers={ 'content-length': str(wrong_size) # wrong size! 
}) with pytest.raises(ValueError) as e: download(url, dest=str(tmp_path)) assert e.value.args[0] == "Error when checking size: %s != %s" % ( wrong_size, len(data) ) @pytest.mark.fs def test_download_ok(tmp_path, requests_mock): """Download without issue should provide filename and hashes""" filename = 'requests-0.0.1.tar.gz' url = 'https://pypi.org/pypi/requests/%s' % filename data = 'this is something' requests_mock.get(url, text=data, headers={ 'content-length': str(len(data)) }) actual_filepath, actual_hashes = download(url, dest=str(tmp_path)) actual_filename = os.path.basename(actual_filepath) assert actual_filename == filename assert actual_hashes['length'] == len(data) assert actual_hashes['checksums']['sha1'] == 'fdd1ce606a904b08c816ba84f3125f2af44d92b2' # noqa assert (actual_hashes['checksums']['sha256'] == '1d9224378d77925d612c9f926eb9fb92850e6551def8328011b6a972323298d5') @pytest.mark.fs def test_download_ok_with_hashes(tmp_path, requests_mock): """Download without issue should provide filename and hashes""" filename = 'requests-0.0.1.tar.gz' url = 'https://pypi.org/pypi/requests/%s' % filename data = 'this is something' requests_mock.get(url, text=data, headers={ 'content-length': str(len(data)) }) # good hashes for such file good = { 'sha1': 'fdd1ce606a904b08c816ba84f3125f2af44d92b2', 'sha256': '1d9224378d77925d612c9f926eb9fb92850e6551def8328011b6a972323298d5', # noqa } actual_filepath, actual_hashes = download(url, dest=str(tmp_path), hashes=good) actual_filename = os.path.basename(actual_filepath) assert actual_filename == filename assert actual_hashes['length'] == len(data) assert actual_hashes['checksums']['sha1'] == good['sha1'] assert actual_hashes['checksums']['sha256'] == good['sha256'] @pytest.mark.fs def test_download_fail_hashes_mismatch(tmp_path, requests_mock): """Mismatch hash after download should raise """ filename = 'requests-0.0.1.tar.gz' url = 'https://pypi.org/pypi/requests/%s' % filename data = 'this is something' requests_mock.get(url, text=data, headers={ 'content-length': str(len(data)) }) # good hashes for such file good = { 'sha1': 'fdd1ce606a904b08c816ba84f3125f2af44d92b2', 'sha256': '1d9224378d77925d612c9f926eb9fb92850e6551def8328011b6a972323298d5', # noqa } for hash_algo in good.keys(): wrong_hash = good[hash_algo].replace('1', '0') expected_hashes = good.copy() expected_hashes[hash_algo] = wrong_hash # set the wrong hash expected_msg = ("Failure when fetching %s. " "Checksum mismatched: %s != %s" % ( url, wrong_hash, good[hash_algo] )) with pytest.raises(ValueError, match=expected_msg): download(url, dest=str(tmp_path), hashes=expected_hashes) def test_api_info_failure(requests_mock): """Failure to fetch info/release information should raise""" url = 'https://pypi.org/pypi/requests/json' status_code = 400 requests_mock.get(url, status_code=status_code) with pytest.raises(ValueError) as e0: api_info(url) assert e0.value.args[0] == "Fail to query '%s'. 
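# For reference, a successful download() call such as the ones above returns
# a (filepath, metadata) pair; the metadata dict is shaped roughly as follows
# (illustrative values; the exact set of checksum algorithms depends on the
# MultiHash defaults, the tests only rely on sha1 and sha256):
#
#   ('/tmp/.../requests-0.0.1.tar.gz', {
#       'length': 17,
#       'filename': 'requests-0.0.1.tar.gz',
#       'checksums': {'sha1': '...', 'sha256': '...'},
#   })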
Reason: %s" % ( url, status_code ) def test_api_info(requests_mock): """Fetching json info from pypi project should be ok""" url = 'https://pypi.org/pypi/requests/json' requests_mock.get(url, text='{"version": "0.0.1"}') actual_info = api_info(url) assert actual_info == { 'version': '0.0.1', } + + +def test_release_name(): + for version, filename, expected_release in [ + ('0.0.1', None, 'releases/0.0.1'), + ('0.0.2', 'something', 'releases/0.0.2/something')]: + assert release_name(version, filename) == expected_release diff --git a/swh/loader/package/utils.py b/swh/loader/package/utils.py index 920964d..3dedc2b 100644 --- a/swh/loader/package/utils.py +++ b/swh/loader/package/utils.py @@ -1,105 +1,111 @@ # Copyright (C) 2019 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import logging import os import requests from typing import Dict, Optional, Tuple from swh.model.hashutil import MultiHash, HASH_BLOCK_SIZE from swh.loader.package import DEFAULT_PARAMS logger = logging.getLogger(__name__) def api_info(url: str) -> Dict: """Basic api client to retrieve information on project. This deals with fetching json metadata about pypi projects. Args: url (str): The api url (e.g PyPI, npm, etc...) Raises: ValueError in case of query failures (for some reasons: 404, ...) Returns: The associated response's information dict """ response = requests.get(url, **DEFAULT_PARAMS) if response.status_code != 200: raise ValueError("Fail to query '%s'. Reason: %s" % ( url, response.status_code)) return response.json() def download(url: str, dest: str, hashes: Dict = {}, filename: Optional[str] = None) -> Tuple[str, Dict]: """Download a remote tarball from url, uncompresses and computes swh hashes on it. Args: url: Artifact uri to fetch, uncompress and hash dest: Directory to write the archive to hashes: Dict of expected hashes (key is the hash algo) for the artifact to download (those hashes are expected to be hex string) Raises: ValueError in case of any error when fetching/computing (length, checksums mismatched...) Returns: Tuple of local (filepath, hashes of filepath) """ response = requests.get(url, **DEFAULT_PARAMS, stream=True) logger.debug('headers: %s', response.headers) if response.status_code != 200: raise ValueError("Fail to query '%s'. Reason: %s" % ( url, response.status_code)) length = int(response.headers['content-length']) filename = filename if filename else os.path.basename(url) logger.debug('filename: %s', filename) filepath = os.path.join(dest, filename) logger.debug('filepath: %s', filepath) h = MultiHash(length=length) with open(filepath, 'wb') as f: for chunk in response.iter_content(chunk_size=HASH_BLOCK_SIZE): h.update(chunk) f.write(chunk) actual_length = os.path.getsize(filepath) if length != actual_length: raise ValueError('Error when checking size: %s != %s' % ( length, actual_length)) # Also check the expected hashes if provided if hashes: actual_hashes = h.hexdigest() for algo_hash in hashes.keys(): actual_digest = actual_hashes[algo_hash] expected_digest = hashes[algo_hash] if actual_digest != expected_digest: raise ValueError( 'Failure when fetching %s. 
' 'Checksum mismatched: %s != %s' % ( url, expected_digest, actual_digest)) extrinsic_metadata = { 'length': length, 'filename': filename, 'checksums': { **h.hexdigest() }, } logger.debug('extrinsic_metadata', extrinsic_metadata) return filepath, extrinsic_metadata + + +def release_name(version: str, filename: Optional[str] = None) -> str: + if filename: + return 'releases/%s/%s' % (version, filename) + return 'releases/%s' % version
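Taken together, the change replaces the hand-rolled 'releases/...' format strings in the Debian, GNU, npm and PyPI loaders with this single helper. A short usage sketch with made-up versions and filenames:

    from swh.loader.package.utils import release_name

    release_name('0.7.2-3')
    # 'releases/0.7.2-3'
    release_name('1.2.0', 'requests-1.2.0.tar.gz')
    # 'releases/1.2.0/requests-1.2.0.tar.gz'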