diff --git a/requirements.txt b/requirements.txt index 4bebe0d..35eb9f7 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,9 +1,10 @@ # Add here external Python modules dependencies, one per line. Module names # should match https://pypi.python.org/pypi names. For the full spec or # dependency lines, see https://pip.readthedocs.org/en/1.1/requirements.html vcversioner retrying psutil requests iso8601 pkginfo +python-debian diff --git a/swh/loader/package/debian.py b/swh/loader/package/debian.py new file mode 100644 index 0000000..47bfde3 --- /dev/null +++ b/swh/loader/package/debian.py @@ -0,0 +1,358 @@ +# Copyright (C) 2017-2019 The Software Heritage developers +# See the AUTHORS file at the top-level directory of this distribution +# License: GNU General Public License version 3, or any later version +# See top-level LICENSE file for more information + +import os +import copy +import datetime +import email.utils +import logging +import re +import subprocess + +from dateutil.parser import parse as parse_date +from debian.changelog import Changelog +from debian.deb822 import Dsc +from typing import Any, Dict, Generator, Mapping, Optional, Sequence, Tuple + +from swh.model import hashutil + +from swh.loader.package.loader import PackageLoader +from swh.loader.package.utils import download + + +logger = logging.getLogger(__name__) +UPLOADERS_SPLIT = re.compile(r'(?<=\>)\s*,\s*') + + +def uid_to_person(uid, encode=True): + """Convert an uid to a person suitable for insertion. + + Args: + uid: an uid of the form "Name <email@address>" + encode: whether to convert the output to bytes or not + + Returns: + dict: a dictionary with the following keys: + + - name: the name associated to the uid + - email: the mail associated to the uid + - fullname: the original uid + """ + + ret = { + 'name': '', + 'email': '', + 'fullname': uid, + } + + name, mail = email.utils.parseaddr(uid) + + if name and mail: + ret['name'] = name + ret['email'] = mail + else: + ret['name'] = uid + + if encode: + for key in list(ret): + ret[key] = ret[key].encode('utf-8') + + return ret + + +def download_package(package: Dict, tmpdir: Any) -> Mapping[str, Dict]: + """Fetch a source package in a temporary directory and check the checksums + for all files. + + Args: + package: Dict defining the set of files representing a debian package + tmpdir: Where to download and extract the files to ingest + + Returns: + Dict of swh hashes per filename key + + """ + all_hashes = {} + for filename, fileinfo in copy.deepcopy(package['files']).items(): + uri = fileinfo.pop('uri') + logger.debug('filename: %s', filename) + logger.debug('fileinfo: %s', fileinfo) + extrinsic_hashes = {'sha256': fileinfo['sha256']} + logger.debug('extrinsic_hashes(%s): %s', filename, extrinsic_hashes) + filepath, hashes = download(uri, dest=tmpdir, filename=filename, + hashes=extrinsic_hashes) + all_hashes[filename] = hashes + + logger.debug('all_hashes: %s', all_hashes) + return all_hashes + + +def extract_package(package: Dict, tmpdir: str) -> Tuple[str, str, str]: + """Extract a Debian source package to a given directory. + + Note that after extraction the target directory will be the root of the + extracted package, rather than containing it.
+ + Args: + package (dict): package information dictionary + tmpdir (str): directory where the package files are stored + + Returns: + tuple: path to the dsc, uri used to retrieve the dsc, extraction + directory + + """ + dsc_name = None + dsc_url = None + + for filename, fileinfo in package['files'].items(): + if filename.endswith('.dsc'): + if dsc_name: + raise ValueError( + 'Package %s_%s references several dsc files' % + (package['name'], package['version']) + ) + dsc_url = fileinfo['uri'] + dsc_name = filename + + dsc_path = os.path.join(tmpdir, dsc_name) + destdir = os.path.join(tmpdir, 'extracted') + logfile = os.path.join(tmpdir, 'extract.log') + + logger.debug('extract Debian source package %s in %s', + dsc_path, destdir, extra={ + 'swh_type': 'deb_extract', + 'swh_dsc': dsc_path, + 'swh_destdir': destdir, + }) + + cmd = ['dpkg-source', + '--no-copy', '--no-check', + '--ignore-bad-version', + '-x', dsc_path, + destdir] + + try: + with open(logfile, 'w') as stdout: + subprocess.check_call(cmd, stdout=stdout, stderr=subprocess.STDOUT) + except subprocess.CalledProcessError as e: + with open(logfile, 'r') as f: + logdata = f.read() + raise ValueError('dpkg-source exited with code %s: %s' % + (e.returncode, logdata)) from None + + return dsc_path, dsc_url, destdir + + +def get_file_info(filepath): + """Retrieve the original file information from the file at filepath. + + Args: + filepath: the path to the original file + + Returns: + dict: information about the original file, in a dictionary with the + following keys + + - name: the file name + - sha1, sha1_git, sha256: original file hashes + - length: original file length + """ + + name = os.path.basename(filepath) + if isinstance(name, bytes): + name = name.decode('utf-8') + + hashes = hashutil.MultiHash.from_path(filepath).hexdigest() + hashes['name'] = name + hashes['length'] = os.path.getsize(filepath) + return hashes + + +def get_package_metadata(package, dsc_path, extracted_path): + """Get the package metadata from the source package at dsc_path, + extracted in extracted_path.
+ + Args: + package: the package metadata dict, as built by the lister (with a + 'files' key) + dsc_path: path to the package's dsc file + extracted_path: the path where the package got extracted + + Returns: + dict: a dictionary with the following keys: + + - original_artifact: information about all the files in the source + package + - package_info: package name, version, changelog and maintainers, + parsed from the dsc and the changelog + + """ + ret = {} + + with open(dsc_path, 'rb') as dsc: + parsed_dsc = Dsc(dsc) + + source_files = [get_file_info(dsc_path)] + + dsc_dir = os.path.dirname(dsc_path) + for filename in package['files']: + file_path = os.path.join(dsc_dir, filename) + file_info = get_file_info(file_path) + source_files.append(file_info) + + ret['original_artifact'] = source_files + + # Parse the changelog to retrieve the rest of the package information + changelog_path = os.path.join(extracted_path, 'debian/changelog') + with open(changelog_path, 'rb') as changelog: + try: + parsed_changelog = Changelog(changelog) + except UnicodeDecodeError: + logger.warning('Unknown encoding for changelog %s,' + ' falling back to iso', + changelog_path, extra={ + 'swh_type': 'deb_changelog_encoding', + 'swh_name': package['name'], + 'swh_version': str(package['version']), + 'swh_changelog': changelog_path, + }) + + # need to reset as Changelog scrolls to the end of the file + changelog.seek(0) + parsed_changelog = Changelog(changelog, encoding='iso-8859-15') + + package_info = { + 'name': package['name'], + 'version': str(package['version']), + 'changelog': { + 'person': uid_to_person(parsed_changelog.author), + 'date': parse_date(parsed_changelog.date), + 'history': [(block.package, str(block.version)) + for block in parsed_changelog][1:], + } + } + + maintainers = [ + uid_to_person(parsed_dsc['Maintainer'], encode=False), + ] + maintainers.extend( + uid_to_person(person, encode=False) + for person in UPLOADERS_SPLIT.split(parsed_dsc.get('Uploaders', '')) + ) + package_info['maintainers'] = maintainers + + ret['package_info'] = package_info + + return ret + + +class DebianLoader(PackageLoader): + """Load debian origins into swh archive. + + """ + visit_type = 'debian' + + def __init__(self, url: str, date: str, packages: Mapping[str, Dict]): + super().__init__(url=url) + self._info = None + self.packages = packages + self.dsc_path = None + self.dsc_url = None + + def get_versions(self) -> Sequence[str]: + """Returns the keys of the packages input (e.g. + stretch/contrib/0.7.2-3, etc...) + + """ + return list(self.packages.keys()) + + def get_default_release(self) -> str: + """Take the first version as default release + + """ + return list(self.packages.keys())[0] + + def get_artifacts(self, version: str) -> Generator[ + Tuple[str, str, Dict], None, None]: + url = '' # url is not useful to retrieve the package files here + a_metadata = self.packages[version] + yield version, url, a_metadata # we care only for version, a_metadata + + def resolve_revision_from( + self, known_artifacts: Dict, artifact_metadata: Dict) \ + -> Optional[bytes]: + pass # for now + + def download_package(self, a_uri: str, tmpdir: str, filename: str, + a_metadata: Dict) -> Tuple[str, Dict]: + """Contrary to other package loaders (1 package, 1 artifact), + `a_metadata` describes the set of data files to fetch for one + package version: + - .orig.tar.gz + - .dsc + - .diff.gz + + This is delegated to the module-level `download_package` function.
+ + """ + logger.debug('debian: a_metadata: %s', a_metadata) + a_c_metadata = download_package(a_metadata, tmpdir) + return tmpdir, a_c_metadata + + def uncompress(self, a_path: str, tmpdir: str, a_metadata: Dict) -> str: + self.dsc_path, self.dsc_url, a_uncompressed_path = extract_package( + a_metadata, tmpdir) + return a_uncompressed_path + + def read_intrinsic_metadata(self, a_metadata: Dict, + a_uncompressed_path: str) -> Dict: + dsc_path = self.dsc_path # XXX + i_metadata = get_package_metadata( + a_metadata, dsc_path, a_uncompressed_path) + return i_metadata + + def build_revision( + self, a_metadata: Dict, i_metadata: Dict) -> Dict: + + logger.debug('i_metadata: %s', i_metadata) + logger.debug('a_metadata: %s', a_metadata) + + def prepare(obj): + if isinstance(obj, list): + return [prepare(item) for item in obj] + elif isinstance(obj, dict): + return {k: prepare(v) for k, v in obj.items()} + elif isinstance(obj, datetime.datetime): + return obj.isoformat() + elif isinstance(obj, bytes): + return obj.decode('utf-8') + else: + return copy.deepcopy(obj) + + package_info = i_metadata['package_info'] + + msg = 'Synthetic revision for Debian source package %s version %s' % ( + a_metadata['name'], a_metadata['version']) + + date = package_info['changelog']['date'] + author = package_info['changelog']['person'] + + # inspired from swh.loader.debian.converters.package_metadata_to_revision # noqa + return { + 'type': 'dsc', + 'message': msg.encode('utf-8'), + 'author': author, + 'date': date, + 'committer': author, + 'committer_date': date, + 'parents': [], + 'metadata': { + 'intrinsic': { + 'tool': 'dsc', + 'raw': prepare(package_info), + }, + 'extrinsic': { + 'provider': self.dsc_url, + 'when': self.visit_date.isoformat(), + 'raw': a_metadata, + }, + } + } diff --git a/swh/loader/package/deposit.py b/swh/loader/package/deposit.py index 9d4c153..8c16acf 100644 --- a/swh/loader/package/deposit.py +++ b/swh/loader/package/deposit.py @@ -1,150 +1,151 @@ # Copyright (C) 2019 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import logging from typing import Generator, Dict, Tuple, Sequence from swh.model.hashutil import hash_to_hex from swh.loader.package.loader import PackageLoader from swh.deposit.client import PrivateApiDepositClient as ApiClient logger = logging.getLogger(__name__) class DepositLoader(PackageLoader): """Load pypi origin's artifact releases into swh archive. 
""" visit_type = 'deposit' def __init__(self, url: str, deposit_id: str): """Constructor Args: url: Origin url to associate the artifacts/metadata to deposit_id: Deposit identity """ super().__init__(url=url) # For now build back existing api urls # archive_url: Private api url to retrieve archive artifact self.archive_url = '/%s/raw/' % deposit_id # metadata_url: Private api url to retrieve the deposit metadata self.metadata_url = '/%s/meta/' % deposit_id # deposit_update_url: Private api to push pids and status update on the # deposit id self.deposit_update_url = '/%s/update/' % deposit_id self.client = ApiClient() self._metadata = None @property def metadata(self): if self._metadata is None: self._metadata = self.client.metadata_get(self.metadata_url) return self._metadata def get_versions(self) -> Sequence[str]: # only 1 branch 'HEAD' with no alias since we only have 1 snapshot # branch return ['HEAD'] def get_artifacts(self, version: str) -> Generator[ Tuple[str, str, Dict], None, None]: filename = 'archive.zip' # do not care about it here url = self.client.base_url + self.archive_url yield filename, url, self.metadata def build_revision( - self, a_metadata: Dict, a_uncompressed_path: str) -> Dict: + self, a_metadata: Dict, i_metadata: Dict) -> Dict: revision = a_metadata.pop('revision') metadata = { 'extrinsic': { 'provider': '%s/%s' % ( self.client.base_url, self.metadata_url), 'when': self.visit_date.isoformat(), 'raw': a_metadata, }, } # FIXME: the deposit no longer needs to build the revision revision['metadata'].update(metadata) revision['author'] = parse_author(revision['author']) revision['committer'] = parse_author(revision['committer']) revision['message'] = revision['message'].encode('utf-8') + revision['type'] = 'tar' return revision def load(self) -> Dict: # Usual loading r = super().load() success = r['status'] != 'failed' if success: # Update archive with metadata information origin_metadata = self.metadata['origin_metadata'] logger.debug('origin_metadata: %s', origin_metadata) tools = self.storage.tool_add([origin_metadata['tool']]) logger.debug('tools: %s', tools) tool_id = tools[0]['id'] provider = origin_metadata['provider'] # FIXME: Shall we delete this info? 
provider_id = self.storage.metadata_provider_add( provider['provider_name'], provider['provider_type'], provider['provider_url'], metadata=None) metadata = origin_metadata['metadata'] self.storage.origin_metadata_add( self.url, self.visit_date, provider_id, tool_id, metadata) # Update deposit status try: if not success: self.client.status_update( self.deposit_update_url, status='failed') return r snapshot_id = r['snapshot_id'] branches = self.storage.snapshot_get(snapshot_id)['branches'] logger.debug('branches: %s', branches) if not branches: return r rev_id = branches[b'HEAD']['target'] revision = next(self.storage.revision_get([rev_id])) # Retrieve the revision identifier dir_id = revision['directory'] # update the deposit's status to success with its # revision-id and directory-id self.client.status_update( self.deposit_update_url, status='done', revision_id=hash_to_hex(rev_id), directory_id=hash_to_hex(dir_id), origin_url=self.url) except Exception: logger.exception( 'Problem when trying to update the deposit\'s status') return {'status': 'failed'} return r def parse_author(author): """See prior fixme """ return { 'fullname': author['fullname'].encode('utf-8'), 'name': author['name'].encode('utf-8'), 'email': author['email'].encode('utf-8'), } diff --git a/swh/loader/package/gnu.py b/swh/loader/package/gnu.py index 6dbfbad..6401dfc 100644 --- a/swh/loader/package/gnu.py +++ b/swh/loader/package/gnu.py @@ -1,187 +1,188 @@ # Copyright (C) 2019 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import logging import re from os import path from typing import Dict, Generator, Optional, Sequence, Tuple from swh.loader.package.loader import PackageLoader from swh.model.identifiers import normalize_timestamp logger = logging.getLogger(__name__) # to recognize existing naming pattern extensions = [ 'zip', 'tar', 'gz', 'tgz', 'bz2', 'bzip2', 'lzma', 'lz', 'xz', 'Z', ] version_keywords = [ 'cygwin_me', 'w32', 'win32', 'nt', 'cygwin', 'mingw', 'latest', 'alpha', 'beta', 'release', 'stable', 'hppa', 'solaris', 'sunos', 'sun4u', 'sparc', 'sun', 'aix', 'ibm', 'rs6000', 'i386', 'i686', 'linux', 'redhat', 'linuxlibc', 'mips', 'powerpc', 'macos', 'apple', 'darwin', 'macosx', 'powermacintosh', 'unknown', 'netbsd', 'freebsd', 'sgi', 'irix', ] # Match a filename into components. # # We use Debian's release number heuristic: A release number starts # with a digit, and is followed by alphanumeric characters or any of # ., +, :, ~ and - # # We hardcode a list of possible extensions, as this release number # scheme would match them too... We match on any combination of those. # # Greedy matching is done right to left (we only match the extension # greedily with +, software_name and release_number are matched lazily # with +? and *?). pattern = r''' ^ (?: # We have a software name and a release number, separated with a # -, _ or dot. (?P<software_name1>.+?[-_.]) (?P<release_number>(%(vkeywords)s|[0-9][0-9a-zA-Z_.+:~-]*?)+) | # We couldn't match a release number, put everything in the # software name. (?P<software_name2>.+?) ) (?P<extension>(?:\.(?:%(extensions)s))+) $ ''' % { 'extensions': '|'.join(extensions), 'vkeywords': '|'.join('%s[-]?'
% k for k in version_keywords), } def get_version(url: str) -> str: """Extract the release version from a tarball url Args: url (str): Tarball URL Returns: str: the release version, or '' when no version can be parsed Example: For url = https://ftp.gnu.org/gnu/8sync/8sync-0.2.0.tar.gz >>> get_version(url) '0.2.0' """ filename = path.split(url)[-1] m = re.match(pattern, filename, flags=re.VERBOSE | re.IGNORECASE) if m: d = m.groupdict() if d['software_name1'] and d['release_number']: return d['release_number'] if d['software_name2']: return d['software_name2'] return '' class GNULoader(PackageLoader): visit_type = 'gnu' SWH_PERSON = { 'name': b'Software Heritage', 'fullname': b'Software Heritage', 'email': b'robot@softwareheritage.org' } REVISION_MESSAGE = b'swh-loader-package: synthetic revision message' def __init__(self, package_url: str, tarballs: Sequence): """Loader constructor. For now, this is the lister's task output. Args: package_url: Origin url tarballs: List of dict with keys `time` (timestamp) and `archive` (str), the url to retrieve one versioned archive """ super().__init__(url=package_url) self.tarballs = list(sorted(tarballs, key=lambda v: v['time'])) def get_versions(self) -> Sequence[str]: versions = [] for archive in self.tarballs: v = get_version(archive['archive']) if v: versions.append(v) return versions def get_default_release(self) -> str: # It's the most recent, so for this loader, it's the last one return get_version(self.tarballs[-1]['archive']) def get_artifacts(self, version: str) -> Generator[ Tuple[str, str, Dict], None, None]: for a_metadata in self.tarballs: url = a_metadata['archive'] artifact_version = get_version(url) if version == artifact_version: filename = path.split(url)[-1] yield filename, url, a_metadata def resolve_revision_from( self, known_artifacts: Dict, artifact_metadata: Dict) \ -> Optional[bytes]: def pk(d): return [d.get(k) for k in ['time', 'archive', 'length']] artifact_pk = pk(artifact_metadata) for rev_id, known_artifact in known_artifacts.items(): logging.debug('known_artifact: %s', known_artifact) known_pk = pk(known_artifact['extrinsic']['raw']) if artifact_pk == known_pk: return rev_id def build_revision( - self, a_metadata: Dict, a_uncompressed_path: str) -> Dict: + self, a_metadata: Dict, i_metadata: Dict) -> Dict: normalized_date = normalize_timestamp(int(a_metadata['time'])) return { + 'type': 'tar', 'message': self.REVISION_MESSAGE, 'date': normalized_date, 'author': self.SWH_PERSON, 'committer': self.SWH_PERSON, 'committer_date': normalized_date, 'parents': [], 'metadata': { 'intrinsic': {}, 'extrinsic': { 'provider': self.url, 'when': self.visit_date.isoformat(), 'raw': a_metadata, }, }, } diff --git a/swh/loader/package/loader.py b/swh/loader/package/loader.py index 7b6c529..8783c8f 100644 --- a/swh/loader/package/loader.py +++ b/swh/loader/package/loader.py @@ -1,356 +1,397 @@ # Copyright (C) 2019 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import datetime import logging import tempfile import os from typing import Generator, Dict, Tuple, Sequence, List, Optional from swh.core.tarball import uncompress from swh.core.config import SWHConfig from swh.model.from_disk import Directory from swh.model.identifiers import ( revision_identifier, snapshot_identifier, identifier_to_bytes ) from swh.storage import get_storage from swh.storage.algos.snapshot import snapshot_get_all_branches from
swh.loader.core.converters import content_for_storage from swh.loader.package.utils import download logger = logging.getLogger(__name__) # Not implemented yet: # - clean up disk routines from previous killed workers (when OOMkilled) # -> separation of concern would like this to be abstracted from the code # -> experience tells us it's complicated to do as such (T903, T964, T982, # etc...) # # - model: swh.model.merkle.from_disk should output swh.model.model.* objects # to avoid this layer's conversion routine call # -> Take this up within swh.model's current implementation class PackageLoader: # Origin visit type (str) set by the loader visit_type = '' def __init__(self, url): """Loader's constructor. This raises an exception if the minimal required configuration is missing (cf. the :func:`_check_configuration` method). Args: url (str): Origin url to load data from """ # This expects to use the environment variable SWH_CONFIG_FILENAME self.config = SWHConfig.parse_config_file() self._check_configuration() self.storage = get_storage(**self.config['storage']) self.url = url self.visit_date = datetime.datetime.now(tz=datetime.timezone.utc) def _check_configuration(self): """Checks the minimal configuration required is set for the loader. If some required configuration is missing, exception detailing the issue is raised. """ if 'storage' not in self.config: raise ValueError( 'Misconfiguration, at least the storage key should be set') def get_versions(self) -> Sequence[str]: """Return the list of all published package versions. Returns: Sequence of published versions """ return [] def get_artifacts(self, version: str) -> Generator[ Tuple[str, str, Dict], None, None]: """Given a release version of a package, retrieve the associated artifact information for such version. Args: version: Package version Returns: (artifact filename, artifact uri, raw artifact metadata) """ yield from {} def build_revision( - self, a_metadata: Dict, a_uncompressed_path: str) -> Dict: - """Build the revision dict + self, a_metadata: Dict, i_metadata: Dict) -> Dict: + """Build the revision dict from the archive metadata (extrinsic + artifact metadata) and the intrinsic metadata. Returns: SWH data dict """ return {} def get_default_release(self) -> str: """Retrieve the latest release version Returns: Latest version """ return '' def last_snapshot(self) -> Optional[Dict]: """Retrieve the last snapshot """ visit = self.storage.origin_visit_get_latest( self.url, require_snapshot=True) if visit: return snapshot_get_all_branches( self.storage, visit['snapshot']['id']) def known_artifacts(self, snapshot: Dict) -> Dict: """Retrieve the known releases/artifacts for the origin. Args: snapshot: snapshot for the visit Returns: Dict mapping a revision id (bytes) to its metadata dict. """ if not snapshot or 'branches' not in snapshot: return {} # retrieve only revisions (e.g the alias we do not want here) revs = [rev['target'] for rev in snapshot['branches'].values() if rev and rev['target_type'] == 'revision'] known_revisions = self.storage.revision_get(revs) ret = {} for revision in known_revisions: if not revision: # revision_get can return None continue ret[revision['id']] = revision['metadata'] return ret def resolve_revision_from( self, known_artifacts: Dict, artifact_metadata: Dict) \ -> Optional[bytes]: """Resolve the revision from a snapshot and an artifact metadata dict. If the artifact has already been downloaded, this will return the existing revision targeting that uncompressed artifact directory. Otherwise, this returns None.
Args: known_artifacts: Mapping of known revision ids to their metadata dict, as returned by :func:`known_artifacts` artifact_metadata: Information dict Returns: None or revision identifier """ return None + def download_package(self, a_uri: str, tmpdir: str, filename: str, + a_metadata: Dict) -> Tuple[str, Dict]: + """Download package from uri within the tmpdir (with name filename). + Optionally, this can also use the a_metadata information to retrieve + more information. + + Note: the default implementation does not use a_metadata (the debian + implementation does) + + """ + return download(a_uri, dest=tmpdir, filename=filename) + + def read_intrinsic_metadata( + self, a_metadata: Dict, a_uncompressed_path: str) -> Dict: + """Read intrinsic metadata from either the a_metadata or + the uncompressed path. + + Depending on the implementation, some loaders extract it directly from + the artifacts they ingest (pypi, npm...), some parse files retrieved + alongside the artifacts (debian parses the dsc and the changelog) and + some have none at all (gnu). + + """ + return {} + + def uncompress( + self, a_path: str, tmpdir: str, a_metadata: Dict) -> str: + """Uncompress the artifact(s) stored at a_path to tmpdir. + + Optionally, this could need to use the a_metadata dict for some more + information (debian). + + """ + uncompressed_path = os.path.join(tmpdir, 'src') + uncompress(a_path, dest=uncompressed_path) + return uncompressed_path + def load(self) -> Dict: """Load for a specific origin the associated contents. for each package version of the origin 1. Fetch the files for one package version By default, this can be implemented as a simple HTTP request. Loaders with more specific requirements can override this, e.g.: the PyPI loader checks the integrity of the downloaded files; the Debian loader has to download and check several files for one package version. 2. Extract the downloaded files By default, this would be a universal archive/tarball extraction. Loaders for specific formats can override this method (for instance, the Debian loader uses dpkg-source -x). 3. Convert the extracted directory to a set of Software Heritage objects Using swh.model.from_disk. 4. Extract the metadata from the unpacked directories This would only be applicable for "smart" loaders like npm (parsing the package.json), PyPI (parsing the PKG-INFO file) or Debian (parsing debian/changelog and debian/control). On "minimal-metadata" sources such as the GNU archive, the lister should provide the minimal set of metadata needed to populate the revision/release objects (authors, dates) as an argument to the task. 5. Generate the revision/release objects for the given version. From the data generated at steps 3 and 4. end for each 6.
Generate and load the snapshot for the visit Using the revisions/releases collected at step 5., and the default release computed before the loop, generate a snapshot and load it into the Software Heritage archive """ status_load = 'uneventful' # either: eventful, uneventful, failed status_visit = 'full' # either: partial, full tmp_revisions = {} # type: Dict[str, List] snapshot = None try: # Prepare origin and origin_visit origin = {'url': self.url} self.storage.origin_add([origin]) visit_id = self.storage.origin_visit_add( origin=self.url, date=self.visit_date, type=self.visit_type)['visit'] last_snapshot = self.last_snapshot() logger.debug('last snapshot: %s', last_snapshot) known_artifacts = self.known_artifacts(last_snapshot) logger.debug('known artifacts: %s', known_artifacts) # Retrieve the default release (the "latest" one) default_release = self.get_default_release() logger.debug('default release: %s', default_release) for version in self.get_versions(): # for each logger.debug('version: %s', version) tmp_revisions[version] = [] # `a_` stands for `artifact_` for a_filename, a_uri, a_metadata in self.get_artifacts( version): logger.debug('a_metadata: %s', a_metadata) revision_id = self.resolve_revision_from( known_artifacts, a_metadata) if revision_id is None: with tempfile.TemporaryDirectory() as tmpdir: try: # a_c_: archive_computed_ - a_path, a_c_metadata = download( - a_uri, dest=tmpdir, filename=a_filename) + a_path, a_c_metadata = self.download_package( + a_uri, tmpdir, a_filename, + a_metadata=a_metadata) except Exception: logger.exception('Unable to retrieve %s', a_uri) status_visit = 'partial' continue logger.debug('archive_path: %s', a_path) logger.debug('archive_computed_metadata: %s', a_c_metadata) - uncompressed_path = os.path.join(tmpdir, 'src') - uncompress(a_path, dest=uncompressed_path) - + uncompressed_path = self.uncompress( + a_path, tmpdir, a_metadata) logger.debug('uncompressed_path: %s', uncompressed_path) directory = Directory.from_disk( - path=uncompressed_path.encode('utf-8'), data=True) # noqa + path=uncompressed_path.encode('utf-8'), + data=True) # noqa # FIXME: Try not to load the full raw content in # memory objects = directory.collect() contents = objects['content'].values() logger.debug('Number of contents: %s', len(contents)) self.storage.content_add( map(content_for_storage, contents)) status_load = 'eventful' directories = objects['directory'].values() logger.debug('Number of directories: %s', len(directories)) self.storage.directory_add(directories) + i_metadata = self.read_intrinsic_metadata( + a_metadata, uncompressed_path) + # FIXME: This should be release. cf.
D409 revision = self.build_revision( - a_metadata, uncompressed_path) + a_metadata, i_metadata) revision.update({ - 'type': 'tar', 'synthetic': True, 'directory': directory.hash, }) revision['metadata'].update({ 'original_artifact': a_c_metadata, }) revision['id'] = revision_id = identifier_to_bytes( revision_identifier(revision)) logger.debug('Revision: %s', revision) self.storage.revision_add([revision]) tmp_revisions[version].append((a_filename, revision_id)) # Build and load the snapshot branches = {} for version, v_branches in tmp_revisions.items(): if len(v_branches) == 1: branch_name = ( version if version == 'HEAD' else 'releases/%s' % version).encode('utf-8') if version == default_release: branches[b'HEAD'] = { 'target_type': 'alias', 'target': branch_name, } branches[branch_name] = { 'target_type': 'revision', 'target': v_branches[0][1], } else: for filename, target in v_branches: branch_name = ('releases/%s/%s' % ( version, filename)).encode('utf-8') branches[branch_name] = { 'target_type': 'revision', 'target': target, } snapshot = { 'branches': branches } logger.debug('snapshot: %s', snapshot) snapshot['id'] = identifier_to_bytes( snapshot_identifier(snapshot)) logger.debug('snapshot: %s', snapshot) self.storage.snapshot_add([snapshot]) if hasattr(self.storage, 'flush'): self.storage.flush() except Exception: logger.exception('Fail to load %s' % self.url) status_visit = 'partial' status_load = 'failed' finally: self.storage.origin_visit_update( origin=self.url, visit_id=visit_id, status=status_visit, snapshot=snapshot) result = { 'status': status_load, } if snapshot: result['snapshot_id'] = snapshot['id'] return result diff --git a/swh/loader/package/npm.py b/swh/loader/package/npm.py index 4aca83e..7cc3616 100644 --- a/swh/loader/package/npm.py +++ b/swh/loader/package/npm.py @@ -1,292 +1,295 @@ # Copyright (C) 2019 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import json import logging import os import re from codecs import BOM_UTF8 from typing import Generator, Dict, Tuple, Sequence, Optional import chardet import iso8601 from swh.model.identifiers import normalize_timestamp from swh.loader.package.loader import PackageLoader from swh.loader.package.utils import api_info logger = logging.getLogger(__name__) _EMPTY_AUTHOR = {'fullname': b'', 'name': None, 'email': None} # https://github.com/jonschlinkert/author-regex _author_regexp = r'([^<(]+?)?[ \t]*(?:<([^>(]+?)>)?[ \t]*(?:\(([^)]+?)\)|$)' def parse_npm_package_author(author_str): """ Parse npm package author string. It works with a flexible range of formats, as detailed below:: name name <email> (url) name <email>(url) name<email> (url) name<email>(url) name (url) <email> name (url)<email> name(url) <email> name(url)<email> name (url) name(url) name <email> name<email> <email> (url) <email>(url) (url) <email> (url)<email> (url) Args: author_str (str): input author string Returns: dict: A dict that may contain the following keys: * name * email * url """ author = {} matches = re.findall(_author_regexp, author_str.replace('<>', '').replace('()', ''), re.M) for match in matches: if match[0].strip(): author['name'] = match[0].strip() if match[1].strip(): author['email'] = match[1].strip() if match[2].strip(): author['url'] = match[2].strip() return author def extract_npm_package_author(package_json): """ Extract package author from a ``package.json`` file content and return it in swh format.
Args: package_json (dict): Dict holding the content of parsed ``package.json`` file Returns: dict: A dict with the following keys: * fullname * name * email """ def _author_str(author_data): if type(author_data) is dict: author_str = '' if 'name' in author_data: author_str += author_data['name'] if 'email' in author_data: author_str += ' <%s>' % author_data['email'] return author_str elif type(author_data) is list: return _author_str(author_data[0]) if len(author_data) > 0 else '' else: return author_data author_data = {} for author_key in ('author', 'authors'): if author_key in package_json: author_str = _author_str(package_json[author_key]) author_data = parse_npm_package_author(author_str) name = author_data.get('name') email = author_data.get('email') fullname = None if name and email: fullname = '%s <%s>' % (name, email) elif name: fullname = name if not fullname: return _EMPTY_AUTHOR if fullname: fullname = fullname.encode('utf-8') if name: name = name.encode('utf-8') if email: email = email.encode('utf-8') return {'fullname': fullname, 'name': name, 'email': email} def _lstrip_bom(s, bom=BOM_UTF8): if s.startswith(bom): return s[len(bom):] else: return s def load_json(json_bytes): """ Try to load JSON from bytes and return a dictionary. First try to decode from utf-8. If the decoding failed, try to detect the encoding and decode again with replace error handling. If JSON is malformed, an empty dictionary will be returned. Args: json_bytes (bytes): binary content of a JSON file Returns: dict: JSON data loaded in a dictionary """ json_data = {} try: json_str = _lstrip_bom(json_bytes).decode('utf-8') except UnicodeDecodeError: encoding = chardet.detect(json_bytes)['encoding'] if encoding: json_str = json_bytes.decode(encoding, 'replace') try: json_data = json.loads(json_str) except json.decoder.JSONDecodeError: pass return json_data def extract_intrinsic_metadata(dir_path: str) -> Dict: """Given an uncompressed path holding the package.json file, returns its parsed structure as a dict. The release artifact contains one folder at its root. For example: $ tar tvf zprint-0.0.6.tar.gz drwxr-xr-x root/root 0 2018-08-22 11:01 zprint-0.0.6/ ... Args: dir_path (str): Path to the uncompressed directory representing a release artifact from npm. Returns: the package.json parsed structure as a dict if any, or an empty dict if none was present.
""" # Retrieve the root folder of the archive if not os.path.exists(dir_path): return {} lst = os.listdir(dir_path) if len(lst) == 0: return {} project_dirname = lst[0] package_json_path = os.path.join(dir_path, project_dirname, 'package.json') if not os.path.exists(package_json_path): return {} with open(package_json_path, 'rb') as package_json_file: package_json_bytes = package_json_file.read() return load_json(package_json_bytes) class NpmLoader(PackageLoader): visit_type = 'npm' def __init__(self, package_name, package_url, package_metadata_url): super().__init__(url=package_url) self.provider_url = package_metadata_url self._info = None self._versions = None # if package_url is None: # package_url = 'https://www.npmjs.com/package/%s' % package_name # if package_metadata_url is None: # package_metadata_url = 'https://replicate.npmjs.com/%s/' %\ # quote(package_name, safe='') @property def info(self) -> Dict: """Return the project metadata information (fetched from npm registry) """ if not self._info: self._info = api_info(self.provider_url) return self._info def get_versions(self) -> Sequence[str]: return sorted(self.info['versions'].keys()) def get_default_release(self) -> str: return self.info['dist-tags'].get('latest', '') def get_artifacts(self, version: str) -> Generator[ Tuple[str, str, Dict], None, None]: meta = self.info['versions'][version] url = meta['dist']['tarball'] filename = os.path.basename(url) yield filename, url, meta def resolve_revision_from( self, known_artifacts: Dict, artifact_metadata: Dict) \ -> Optional[bytes]: shasum = artifact_metadata['dist']['shasum'] for rev_id, known_artifact in known_artifacts.items(): original_artifact = known_artifact['original_artifact'] if shasum == original_artifact['checksums']['sha1']: return rev_id + def read_intrinsic_metadata(self, a_metadata: Dict, + a_uncompressed_path: str) -> Dict: + return extract_intrinsic_metadata(a_uncompressed_path) + def build_revision( - self, a_metadata: Dict, a_uncompressed_path: str) -> Dict: - # Parse metadata (project, artifact metadata) - i_metadata = extract_intrinsic_metadata(a_uncompressed_path) + self, a_metadata: Dict, i_metadata: Dict) -> Dict: # from intrinsic metadata author = extract_npm_package_author(i_metadata) # extrinsic metadata version = i_metadata['version'] date = self.info['time'][version] date = iso8601.parse_date(date) date = normalize_timestamp(int(date.timestamp())) message = version.encode('ascii') return { + 'type': 'tar', + 'message': message, 'author': author, 'date': date, 'committer': author, 'committer_date': date, - 'message': message, 'parents': [], 'metadata': { 'intrinsic': { 'tool': 'package.json', 'raw': i_metadata, }, 'extrinsic': { 'provider': self.provider_url, 'when': self.visit_date.isoformat(), 'raw': a_metadata, }, }, } diff --git a/swh/loader/package/pypi.py b/swh/loader/package/pypi.py index 0ecc10d..282b329 100644 --- a/swh/loader/package/pypi.py +++ b/swh/loader/package/pypi.py @@ -1,179 +1,182 @@ # Copyright (C) 2019 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import os from typing import Generator, Dict, Tuple, Sequence, Optional from urllib.parse import urlparse from pkginfo import UnpackedSDist import iso8601 from swh.model.identifiers import normalize_timestamp from swh.loader.package.loader import PackageLoader from swh.loader.package.utils import api_info def 
pypi_api_url(url: str) -> str: """Compute the api url from a project url Args: url (str): PyPI project url (e.g. https://pypi.org/project/requests) Returns: the project's api url (e.g. https://pypi.org/pypi/requests/json) """ p_url = urlparse(url) project_name = p_url.path.split('/')[-1] url = '%s://%s/pypi/%s/json' % (p_url.scheme, p_url.netloc, project_name) return url def extract_intrinsic_metadata(dir_path: str) -> Dict: """Given an uncompressed path holding the pkginfo file, returns a pkginfo parsed structure as a dict. The release artifact contains one folder at its root. For example: $ tar tvf zprint-0.0.6.tar.gz drwxr-xr-x root/root 0 2018-08-22 11:01 zprint-0.0.6/ ... Args: dir_path (str): Path to the uncompressed directory representing a release artifact from pypi. Returns: the pkginfo parsed structure as a dict if any, or an empty dict if none was present. """ # Retrieve the root folder of the archive if not os.path.exists(dir_path): return {} lst = os.listdir(dir_path) if len(lst) != 1: return {} project_dirname = lst[0] pkginfo_path = os.path.join(dir_path, project_dirname, 'PKG-INFO') if not os.path.exists(pkginfo_path): return {} pkginfo = UnpackedSDist(pkginfo_path) raw = pkginfo.__dict__ raw.pop('filename') # this gets added with the ondisk location return raw def author(data: Dict) -> Dict: """Given a dict of project/release artifact information (coming from PyPI), returns an author subset. Args: data (dict): Representing either artifact information or release information. Returns: swh-model dict representing a person. """ name = data.get('author') email = data.get('author_email') if email: fullname = '%s <%s>' % (name, email) else: fullname = name if not fullname: return {'fullname': b'', 'name': None, 'email': None} fullname = fullname.encode('utf-8') if name is not None: name = name.encode('utf-8') if email is not None: email = email.encode('utf-8') return {'fullname': fullname, 'name': name, 'email': email} class PyPILoader(PackageLoader): """Load pypi origin's artifact releases into swh archive.
""" visit_type = 'pypi' def __init__(self, url): super().__init__(url=url) self._info = None self.provider_url = pypi_api_url(self.url) @property def info(self) -> Dict: """Return the project metadata information (fetched from pypi registry) """ if not self._info: self._info = api_info(self.provider_url) return self._info def get_versions(self) -> Sequence[str]: return self.info['releases'].keys() def get_default_release(self) -> str: return self.info['info']['version'] def get_artifacts(self, version: str) -> Generator[ Tuple[str, str, Dict], None, None]: for meta in self.info['releases'][version]: yield meta['filename'], meta['url'], meta def resolve_revision_from( self, known_artifacts: Dict, artifact_metadata: Dict) \ -> Optional[bytes]: sha256 = artifact_metadata['digests']['sha256'] for rev_id, known_artifact in known_artifacts.items(): original_artifact = known_artifact['original_artifact'] if sha256 == original_artifact['checksums']['sha256']: return rev_id + def read_intrinsic_metadata(self, a_metadata: Dict, + a_uncompressed_path: str) -> Dict: + return extract_intrinsic_metadata(a_uncompressed_path) + def build_revision( - self, a_metadata: Dict, a_uncompressed_path: str) -> Dict: - # Parse metadata (project, artifact metadata) - metadata = extract_intrinsic_metadata(a_uncompressed_path) + self, a_metadata: Dict, i_metadata: Dict) -> Dict: # from intrinsic metadata - name = metadata['version'] - _author = author(metadata) + name = i_metadata['version'] + _author = author(i_metadata) # from extrinsic metadata message = a_metadata.get('comment_text', '') message = '%s: %s' % (name, message) if message else name date = normalize_timestamp( int(iso8601.parse_date(a_metadata['upload_time']).timestamp())) return { + 'type': 'tar', 'message': message.encode('utf-8'), 'author': _author, 'date': date, 'committer': _author, 'committer_date': date, 'parents': [], 'metadata': { 'intrinsic': { 'tool': 'PKG-INFO', - 'raw': metadata, + 'raw': i_metadata, }, 'extrinsic': { 'provider': self.provider_url, 'when': self.visit_date.isoformat(), 'raw': a_metadata, }, } } diff --git a/swh/loader/package/tests/conftest.py b/swh/loader/package/tests/conftest.py index f6e787f..2eab00a 100644 --- a/swh/loader/package/tests/conftest.py +++ b/swh/loader/package/tests/conftest.py @@ -1,15 +1,28 @@ # Copyright (C) 2019 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import pytest +import re +from functools import partial from os import path +from swh.core.pytest_plugin import get_response_cb + @pytest.fixture def swh_config(monkeypatch, datadir): conffile = path.join(datadir, 'loader.yml') monkeypatch.setenv('SWH_CONFIG_FILENAME', conffile) return conffile + + +@pytest.fixture +def requests_mock_http_datadir(requests_mock_datadir, datadir): + # hack: main fixture does not support http query yet + requests_mock_datadir.get(re.compile('http://'), body=partial( + get_response_cb, datadir=datadir)) + + return requests_mock_datadir diff --git a/swh/loader/package/tests/data/deb.debian.org/debian__pool_contrib_c_cicero_cicero_0.7.2-3.diff.gz b/swh/loader/package/tests/data/deb.debian.org/debian__pool_contrib_c_cicero_cicero_0.7.2-3.diff.gz new file mode 100644 index 0000000..834ac91 Binary files /dev/null and b/swh/loader/package/tests/data/deb.debian.org/debian__pool_contrib_c_cicero_cicero_0.7.2-3.diff.gz differ diff --git 
a/swh/loader/package/tests/data/deb.debian.org/debian__pool_contrib_c_cicero_cicero_0.7.2-3.dsc b/swh/loader/package/tests/data/deb.debian.org/debian__pool_contrib_c_cicero_cicero_0.7.2-3.dsc new file mode 100644 index 0000000..1f94b20 --- /dev/null +++ b/swh/loader/package/tests/data/deb.debian.org/debian__pool_contrib_c_cicero_cicero_0.7.2-3.dsc @@ -0,0 +1,44 @@ +-----BEGIN PGP SIGNED MESSAGE----- +Hash: SHA512 + +Format: 1.0 +Source: cicero +Binary: cicero +Architecture: all +Version: 0.7.2-3 +Maintainer: Debian Accessibility Team <debian-accessibility@lists.debian.org> +Uploaders: Samuel Thibault <sthibault@debian.org> +Homepage: http://pages.infinit.net/fluxnic/cicero/ +Standards-Version: 3.9.6 +Vcs-Browser: http://git.debian.org/?p=pkg-a11y/cicero.git;a=summary +Vcs-Git: git://git.debian.org/git/pkg-a11y/cicero.git +Build-Depends: debhelper (>= 7) +Package-List: + cicero deb contrib/sound optional arch=all +Checksums-Sha1: + a286efd63fe2c9c9f7bb30255c3d6fcdcf390b43 96527 cicero_0.7.2.orig.tar.gz + 0815282053f21601b0ec4adf7a8fe47eace3c0bc 3964 cicero_0.7.2-3.diff.gz +Checksums-Sha256: + 63f40f2436ea9f67b44e2d4bd669dbabe90e2635a204526c20e0b3c8ee957786 96527 cicero_0.7.2.orig.tar.gz + f039c9642fe15c75bed5254315e2a29f9f2700da0e29d9b0729b3ffc46c8971c 3964 cicero_0.7.2-3.diff.gz +Files: + 4353dede07c5728319ba7f5595a7230a 96527 cicero_0.7.2.orig.tar.gz + a93661b6a48db48d59ba7d26796fc9ce 3964 cicero_0.7.2-3.diff.gz + +-----BEGIN PGP SIGNATURE----- +Version: GnuPG v1 + +iQIcBAEBCgAGBQJUQ9GjAAoJEBH0lP5vGG7NTFMQAIk5Wkicp5/GQOfkFh5qT7X7 +cKd98i/7t/0HznGCPv1iaQwsky5wbdqimMaW+vnKWEj8P2AEOLmyfGAjAKGSj0yW +r28dB0+vaiy1rFbtuTL+AVrtO2b/uVuh9eA2ZhDgLekv//bSzpMorIF+uqdQS18d +x2y9ZyKOucVPc+ARTcTrOmPbKR7ywIZEaj3E0Lq5p1e50BkqHVbZzzM7dMZuyatH +FcTsoCjz9kiulGx4LGzItajMBOdA2lIK4TlBRsO6wApOIvOnhSEQr5CqwbVwzwGv +N//8EoiNbs5bpweOGXOLN/RzvRPaEp/8W5P+E7jKyyiGkBeBrQeDlJA5cqBXcz1G +63zVmLyp3AYDrRaQ1AvgUyoL91mQIsDwc2gwT3YRYc4TE4HtYCAD85e/NGCAG5mk +vy+WH6NaaU6mb17IN7V+mGgb/l5pgwPADP4VaFugjrZK7nJp6I2xK2FmgDlGw8gj +qC2LUVuI/ijxTkxS9KdGSwtF4YLw6hbhUIv+19n5ajJ8MpTniv3hPiG4ZYY0qc7j +oejiRGszAR9syTjPKHhYpBnKwTVg8dkaOI+Jw+uwlK5W0opKoDt4Kr4ceCxuxsvU +L1I0MtaTGsGABJTX6utGvklYROApAcqMzGYozNeYOuGlWpvBx5QqdTmo6yv515cq +vWwMF6ldOni8Da5B/7Q9 +=XtIw +-----END PGP SIGNATURE----- diff --git a/swh/loader/package/tests/data/deb.debian.org/debian__pool_contrib_c_cicero_cicero_0.7.2.orig.tar.gz b/swh/loader/package/tests/data/deb.debian.org/debian__pool_contrib_c_cicero_cicero_0.7.2.orig.tar.gz new file mode 100644 index 0000000..aa0a389 Binary files /dev/null and b/swh/loader/package/tests/data/deb.debian.org/debian__pool_contrib_c_cicero_cicero_0.7.2.orig.tar.gz differ diff --git a/swh/loader/package/tests/data/deb.debian.org/onefile.txt b/swh/loader/package/tests/data/deb.debian.org/onefile.txt new file mode 100644 index 0000000..1d62cd2 --- /dev/null +++ b/swh/loader/package/tests/data/deb.debian.org/onefile.txt @@ -0,0 +1 @@ +This is a file to retrieve information from in a test context diff --git a/swh/loader/package/tests/test_debian.py b/swh/loader/package/tests/test_debian.py new file mode 100644 index 0000000..bd17a30 --- /dev/null +++ b/swh/loader/package/tests/test_debian.py @@ -0,0 +1,137 @@ +# Copyright (C) 2019 The Software Heritage developers +# See the AUTHORS file at the top-level directory of this distribution +# License: GNU General Public License version 3, or any later version +# See top-level LICENSE file for more information + +from os import path + +from swh.loader.package.debian import ( + DebianLoader, get_file_info, download_package +) +from swh.loader.package.tests.common
import check_snapshot + + +PACKAGE_FILES = { + 'files': { + 'cicero_0.7.2-3.diff.gz': { + 'md5sum': 'a93661b6a48db48d59ba7d26796fc9ce', + 'name': 'cicero_0.7.2-3.diff.gz', + 'sha256': 'f039c9642fe15c75bed5254315e2a29f9f2700da0e29d9b0729b3ffc46c8971c', # noqa + 'size': 3964, + 'uri': 'http://deb.debian.org/debian//pool/contrib/c/cicero/cicero_0.7.2-3.diff.gz' # noqa + }, + 'cicero_0.7.2-3.dsc': { + 'md5sum': 'd5dac83eb9cfc9bb52a15eb618b4670a', + 'name': 'cicero_0.7.2-3.dsc', + 'sha256': '35b7f1048010c67adfd8d70e4961aefd8800eb9a83a4d1cc68088da0009d9a03', # noqa + 'size': 1864, + 'uri': 'http://deb.debian.org/debian//pool/contrib/c/cicero/cicero_0.7.2-3.dsc'}, # noqa + 'cicero_0.7.2.orig.tar.gz': { + 'md5sum': '4353dede07c5728319ba7f5595a7230a', + 'name': 'cicero_0.7.2.orig.tar.gz', + 'sha256': '63f40f2436ea9f67b44e2d4bd669dbabe90e2635a204526c20e0b3c8ee957786', # noqa + 'size': 96527, + 'uri': 'http://deb.debian.org/debian//pool/contrib/c/cicero/cicero_0.7.2.orig.tar.gz' # noqa + } + }, + 'id': 23, + 'name': 'cicero', + 'revision_id': None, + 'version': '0.7.2-3' +} + + +PACKAGE_PER_VERSION = { + 'stretch/contrib/0.7.2-3': PACKAGE_FILES +} + + +def test_get_file_info(datadir): + filepath = path.join(datadir, 'deb.debian.org', 'onefile.txt') + actual_info = get_file_info(filepath) + expected_info = { + 'name': 'onefile.txt', + 'length': 62, + 'sha1': '135572f4ac013f49e624612301f9076af1eacef2', + 'sha1_git': '1d62cd247ef251d52d98bbd931d44ad1f967ea99', + 'sha256': '40f1a3cbe9355879319759bae1a6ba09cbf34056e79e951cd2dc0adbff169b9f', # noqa + 'blake2s256': '4072cf9a0017ad7705a9995bbfbbc098276e6a3afea8d84ab54bff6381c897ab', # noqa + } + + assert actual_info == expected_info + + +def test_download_package(datadir, tmpdir, requests_mock_http_datadir): + tmpdir = str(tmpdir) # py3.5 workaround (LocalPath issue) + all_hashes = download_package(PACKAGE_FILES, tmpdir) + assert all_hashes == { + 'cicero_0.7.2-3.diff.gz': { + 'checksums': { + 'blake2s256': '08b1c438e70d2474bab843d826515147fa4a817f8c4baaf3ddfbeb5132183f21', # noqa + 'sha1': '0815282053f21601b0ec4adf7a8fe47eace3c0bc', + 'sha1_git': '834ac91da3a9da8f23f47004bb456dd5bd16fe49', + 'sha256': 'f039c9642fe15c75bed5254315e2a29f9f2700da0e29d9b0729b3ffc46c8971c' # noqa + }, + 'filename': 'cicero_0.7.2-3.diff.gz', + 'length': 3964}, + 'cicero_0.7.2-3.dsc': { + 'checksums': { + 'blake2s256': '8c002bead3e35818eaa9d00826f3d141345707c58fb073beaa8abecf4bde45d2', # noqa + 'sha1': 'abbec4e8efbbc80278236e1dd136831eac08accd', + 'sha1_git': '1f94b2086fa1142c2df6b94092f5c5fa11093a8e', + 'sha256': '35b7f1048010c67adfd8d70e4961aefd8800eb9a83a4d1cc68088da0009d9a03' # noqa + }, + 'filename': 'cicero_0.7.2-3.dsc', + 'length': 1864}, + 'cicero_0.7.2.orig.tar.gz': { + 'checksums': { + 'blake2s256': '9809aa8d2e2dad7f34cef72883db42b0456ab7c8f1418a636eebd30ab71a15a6', # noqa + 'sha1': 'a286efd63fe2c9c9f7bb30255c3d6fcdcf390b43', + 'sha1_git': 'aa0a38978dce86d531b5b0299b4a616b95c64c74', + 'sha256': '63f40f2436ea9f67b44e2d4bd669dbabe90e2635a204526c20e0b3c8ee957786' # noqa + }, + 'filename': 'cicero_0.7.2.orig.tar.gz', + 'length': 96527}} + + +def test_debian_first_visit( + swh_config, requests_mock_http_datadir): + """With no prior visit, loading a debian project ends up with 1 snapshot + + """ + loader = DebianLoader( + url='deb://Debian/packages/cicero', + date='2019-10-12T05:58:09.165557+00:00', + packages=PACKAGE_PER_VERSION) + + actual_load_status = loader.load() + assert actual_load_status['status'] == 'eventful' + + stats = loader.storage.stat_counters() + assert {
'content': 42, + 'directory': 2, + 'origin': 1, + 'origin_visit': 1, + 'person': 1, + 'release': 0, + 'revision': 1, # all artifacts under 1 revision + 'skipped_content': 0, + 'snapshot': 1 + } == stats + + expected_snapshot = { + 'id': 'a59ec49a01ff329dcbbc63fd36a5654143aef240', + 'branches': { + 'HEAD': { + 'target_type': 'alias', + 'target': 'releases/stretch/contrib/0.7.2-3' + }, + 'releases/stretch/contrib/0.7.2-3': { + 'target_type': 'revision', + 'target': '2807f5b3f84368b4889a9ae827fe85854ffecf07', + } + }, + } # different than the previous loader as no release is done + + check_snapshot(expected_snapshot, loader.storage)
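
The refactoring above turns PackageLoader.load() into a fixed pipeline that calls four overridable hooks per artifact: download_package, uncompress, read_intrinsic_metadata and build_revision. To make the new contract concrete, here is a minimal sketch of a loader written against these hooks; the class name, visit type and artifact layout are illustrative assumptions, not part of this series:

    from typing import Dict, Generator, Sequence, Tuple

    from swh.loader.package.loader import PackageLoader


    class ExampleLoader(PackageLoader):
        """Illustrative only: one tarball per version, fetched over
        plain HTTP (all hook defaults apply)."""
        visit_type = 'example'  # hypothetical visit type

        def __init__(self, url: str, artifacts: Dict[str, Dict]):
            # assumed layout: version -> {'url': ..., 'time': ...}
            super().__init__(url=url)
            self.artifacts = artifacts

        def get_versions(self) -> Sequence[str]:
            return list(self.artifacts)

        def get_default_release(self) -> str:
            # assume the lister passes versions oldest to newest
            return list(self.artifacts)[-1]

        def get_artifacts(self, version: str) -> Generator[
                Tuple[str, str, Dict], None, None]:
            a_metadata = self.artifacts[version]
            yield 'archive.tar.gz', a_metadata['url'], a_metadata

        # download_package, uncompress and read_intrinsic_metadata keep
        # the inherited defaults: plain HTTP download, universal tarball
        # extraction, and no intrinsic metadata ({}).

        def build_revision(self, a_metadata: Dict,
                           i_metadata: Dict) -> Dict:
            # 'type' is now each loader's responsibility: load() no
            # longer injects 'type': 'tar' (see the loader.py hunk).
            return {
                'type': 'tar',
                'message': b'synthetic revision',
                'author': {'fullname': b'', 'name': None, 'email': None},
                'committer': {'fullname': b'', 'name': None,
                              'email': None},
                'date': None,
                'committer_date': None,
                'parents': [],
                'metadata': {
                    'intrinsic': {},  # i_metadata would go here
                    'extrinsic': {
                        'provider': self.url,
                        'when': self.visit_date.isoformat(),
                        'raw': a_metadata,
                    },
                },
            }

load() then completes the returned dict itself: it sets 'synthetic' and 'directory', records the downloaded files under metadata['original_artifact'], and computes the revision id, so build_revision implementations only describe what is specific to their ecosystem.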