diff --git a/swh/loader/package/gnu.py b/swh/loader/package/gnu.py
index 6cf6d16..c4eea96 100644
--- a/swh/loader/package/gnu.py
+++ b/swh/loader/package/gnu.py
@@ -1,168 +1,170 @@
# Copyright (C) 2019 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information
import re
from os import path
from typing import Generator, Dict, Tuple, Sequence
from swh.loader.package.loader import PackageLoader
from swh.model.identifiers import normalize_timestamp
# to recognize existing naming pattern
extensions = [
'zip',
'tar',
'gz', 'tgz',
'bz2', 'bzip2',
'lzma', 'lz',
'xz',
'Z',
]
version_keywords = [
'cygwin_me',
'w32', 'win32', 'nt', 'cygwin', 'mingw',
'latest', 'alpha', 'beta',
'release', 'stable',
'hppa',
'solaris', 'sunos', 'sun4u', 'sparc', 'sun',
'aix', 'ibm', 'rs6000',
'i386', 'i686',
'linux', 'redhat', 'linuxlibc',
'mips',
'powerpc', 'macos', 'apple', 'darwin', 'macosx', 'powermacintosh',
'unknown',
'netbsd', 'freebsd',
'sgi', 'irix',
]
# Match a filename into components.
#
# We use Debian's release number heuristic: A release number starts
# with a digit, and is followed by alphanumeric characters or any of
# ., +, :, ~ and -
#
# We hardcode a list of possible extensions, as this release number
# scheme would match them too... We match on any combination of those.
#
# Greedy matching is done right to left (we only match the extension
# greedily with +; software_name and release_number are matched lazily
# with +? and *?).
pattern = r'''
^
(?:
# We have a software name and a release number, separated with a
# -, _ or dot.
(?P<software_name1>.+?[-_.])
(?P<release_number>(%(vkeywords)s|[0-9][0-9a-zA-Z_.+:~-]*?)+)
|
# We couldn't match a release number, put everything in the
# software name.
(?P<software_name2>.+?)
)
(?P<extension>(?:\.(?:%(extensions)s))+)
$
''' % {
'extensions': '|'.join(extensions),
'vkeywords': '|'.join('%s[-]?' % k for k in version_keywords),
}
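# Illustrative example (not part of the original patch): the pattern above
# splits a tarball filename into software name, release number and extension,
# e.g.:
#   >>> m = re.match(pattern, '8sync-0.2.0.tar.gz',
#   ...              flags=re.VERBOSE | re.IGNORECASE)
#   >>> m.group('release_number'), m.group('extension')
#   ('0.2.0', '.tar.gz')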
def get_version(url: str) -> str:
"""Extract branch name from tarball url
Args:
url (str): Tarball URL
Returns:
str: Branch name
Example:
For url = https://ftp.gnu.org/gnu/8sync/8sync-0.2.0.tar.gz
>>> get_version(url)
'0.2.0'
"""
filename = path.split(url)[-1]
m = re.match(pattern, filename,
flags=re.VERBOSE | re.IGNORECASE)
if m:
d = m.groupdict()
if d['software_name1'] and d['release_number']:
return d['release_number']
if d['software_name2']:
return d['software_name2']
return ''
class GNULoader(PackageLoader):
visit_type = 'gnu'
SWH_PERSON = {
'name': b'Software Heritage',
'fullname': b'Software Heritage',
'email': b'robot@softwareheritage.org'
}
REVISION_MESSAGE = b'swh-loader-package: synthetic revision message'
def __init__(self, package: str, package_url: str, tarballs: Sequence):
"""Loader constructor.
For now, this is the lister's task output.
Args:
package: Package's name (unused)
package_url: Origin url
tarballs: List of dicts with keys `date` (date) and `archive` (str),
the url to retrieve one versioned archive
"""
super().__init__(url=package_url)
self.tarballs = sorted(tarballs, key=lambda v: int(v['date']))
def get_versions(self) -> Sequence[str]:
versions = []
for archive in self.tarballs:
v = get_version(archive['archive'])
if v:
versions.append(v)
return versions
def get_default_release(self) -> str:
# It's the most recent, so for this loader, it's the last one
return get_version(self.tarballs[-1]['archive'])
def get_artifacts(self, version: str) -> Generator[
Tuple[str, str, Dict], None, None]:
for a_metadata in self.tarballs:
url = a_metadata['archive']
filename = path.split(url)[-1]
yield filename, url, a_metadata
def build_revision(
- self, a_metadata: Dict, a_uncompressed_path: str) -> Dict:
-
+ self, a_metadata: Dict, a_uncompressed_path: str,
+ visit_date: str) -> Dict:
normalized_date = normalize_timestamp(int(a_metadata['date']))
return {
'message': self.REVISION_MESSAGE,
'date': normalized_date,
'author': self.SWH_PERSON,
'committer': self.SWH_PERSON,
'committer_date': normalized_date,
'parents': [],
'metadata': {
- 'package': {
- 'date': a_metadata['date'],
- 'archive': a_metadata['archive'],
+ 'intrinsic': {},
+ 'extrinsic': {
+ 'provider': self.url,
+ 'when': visit_date,
+ 'raw': a_metadata,
},
},
}
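# Illustrative sketch (not part of the patch): with this change, the
# revision['metadata'] for a GNU artifact ends up shaped roughly as below,
# once load() has also added the 'original_artifact' entry. The 'when' value
# is a hypothetical visit date used only for illustration.
# {
#     'intrinsic': {},
#     'extrinsic': {
#         'provider': 'https://ftp.gnu.org/gnu/8sync/',
#         'when': '2019-01-01T00:00:00+00:00',
#         'raw': {'date': '944729610',
#                 'archive': 'https://ftp.gnu.org/gnu/8sync/8sync-0.1.0.tar.gz'},
#     },
#     'original_artifact': {...},  # filename, length, checksums
# }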
diff --git a/swh/loader/package/loader.py b/swh/loader/package/loader.py
index c6ca625..7686ab6 100644
--- a/swh/loader/package/loader.py
+++ b/swh/loader/package/loader.py
@@ -1,359 +1,353 @@
# Copyright (C) 2019 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information
import datetime
import logging
import tempfile
import os
from typing import Generator, Dict, Tuple, Sequence, List, Optional
from swh.core.tarball import uncompress
from swh.core.config import SWHConfig
from swh.model.from_disk import Directory
from swh.model.identifiers import (
revision_identifier, snapshot_identifier, identifier_to_bytes
)
from swh.storage import get_storage
from swh.storage.algos.snapshot import snapshot_get_all_branches
from swh.loader.core.converters import content_for_storage
from swh.loader.package.utils import download
logger = logging.getLogger(__name__)
# Not implemented yet:
# - clean up disk routines from previous killed workers (when OOMkilled)
# -> separation of concerns suggests this should be abstracted from the code
# -> experience tells us it's complicated to do as such (T903, T964, T982,
# etc...)
#
# - model: swh.model.merkle.from_disk should output swh.model.model.* objects
# to avoid this layer's conversion routine call
# -> Take this up within swh.model's current implementation
class PackageLoader:
# Origin visit type (str) set by the loader
visit_type = ''
- # Url providing the artifact information
- provider_url = ''
def __init__(self, url):
"""Loader's constructor. This raises exception if the minimal required
configuration is missing (cf. fn:`check` method).
Args:
url (str): Origin url to load data from
"""
# This expects to use the environment variable SWH_CONFIG_FILENAME
self.config = SWHConfig.parse_config_file()
self._check_configuration()
self.storage = get_storage(**self.config['storage'])
self.url = url
def _check_configuration(self):
"""Checks the minimal configuration required is set for the loader.
If some required configuration is missing, exception detailing the
issue is raised.
"""
if 'storage' not in self.config:
raise ValueError(
'Misconfiguration, at least the storage key should be set')
def get_versions(self) -> Sequence[str]:
"""Return the list of all published package versions.
Returns:
Sequence of published versions
"""
return []
def get_artifacts(self, version: str) -> Generator[
Tuple[str, str, Dict], None, None]:
"""Given a release version of a package, retrieve the associated
artifact information for that version.
Args:
version: Package version
Returns:
(artifact filename, artifact uri, raw artifact metadata)
"""
yield from {}
def build_revision(
self, a_metadata: Dict, a_uncompressed_path: str,
visit_date: str) -> Dict:
"""Build the revision dict
Returns:
SWH data dict
"""
return {}
def get_default_release(self) -> str:
"""Retrieve the latest release version
Returns:
Latest version
"""
return ''
def last_snapshot(self) -> Optional[Dict]:
"""Retrieve the last snapshot
"""
visit = self.storage.origin_visit_get_latest(
self.url, require_snapshot=True)
if visit:
return snapshot_get_all_branches(
self.storage, visit['snapshot']['id'])
def known_artifacts(self, snapshot: Dict) -> Dict:
"""Retrieve the known releases/artifacts for the origin.
Args:
snapshot: snapshot for the visit
Returns:
Dict mapping revision ids (bytes) to their original artifact metadata Dict.
"""
if not snapshot or 'branches' not in snapshot:
return {}
# retrieve only revisions (e.g. we do not want aliases here)
revs = [rev['target']
for rev in snapshot['branches'].values()
if rev and rev['target_type'] == 'revision']
known_revisions = self.storage.revision_get(revs)
ret = {}
for revision in known_revisions:
if not revision: # revision_get can return None
continue
original_artifact = revision['metadata'].get('original_artifact')
if original_artifact:
ret[revision['id']] = original_artifact
return ret
def resolve_revision_from(
self, known_artifacts: Dict, artifact_metadata: Dict) \
-> Optional[bytes]:
"""Resolve the revision from a snapshot and an artifact metadata dict.
If the artifact has already been downloaded, this will return the
existing revision targeting that uncompressed artifact directory.
Otherwise, this returns None.
Args:
known_artifacts: Dict of known revisions and their artifact metadata
artifact_metadata: Information dict
Returns:
None or revision identifier
"""
return None
def load(self) -> Dict:
"""Load for a specific origin the associated contents.
for each package version of the origin
1. Fetch the files for one package version. By default, this can be
implemented as a simple HTTP request. Loaders with more specific
requirements can override this, e.g.: the PyPI loader checks the
integrity of the downloaded files; the Debian loader has to download
and check several files for one package version.
2. Extract the downloaded files. By default, this would be a universal
archive/tarball extraction.
Loaders for specific formats can override this method (for instance,
the Debian loader uses dpkg-source -x).
3. Convert the extracted directory to a set of Software Heritage
objects, using swh.model.from_disk.
4. Extract the metadata from the unpacked directories. This would only
be applicable for "smart" loaders like npm (parsing the
package.json), PyPI (parsing the PKG-INFO file) or Debian (parsing
debian/changelog and debian/control).
On "minimal-metadata" sources such as the GNU archive, the lister
should provide the minimal set of metadata needed to populate the
revision/release objects (authors, dates) as an argument to the
task.
5. Generate the revision/release objects for the given version, from
the data generated at steps 3 and 4.
end for each
6. Generate and load the snapshot for the visit.
Using the revisions/releases collected at step 5 and the branch
information from step 0, generate a snapshot and load it into the
Software Heritage archive.
"""
status_load = 'uneventful' # either: eventful, uneventful, failed
status_visit = 'full' # either: partial, full
tmp_revisions: Dict[str, List] = {}
snapshot = None
try:
# Prepare origin and origin_visit
origin = {'url': self.url}
self.storage.origin_add([origin])
visit_date = datetime.datetime.now(tz=datetime.timezone.utc)
visit_id = self.storage.origin_visit_add(
origin=self.url,
date=visit_date,
type=self.visit_type)['visit']
last_snapshot = self.last_snapshot()
logger.debug('last snapshot: %s', last_snapshot)
known_artifacts = self.known_artifacts(last_snapshot)
logger.debug('known artifacts: %s', known_artifacts)
# Retrieve the default release (the "latest" one)
default_release = self.get_default_release()
logger.debug('default release: %s', default_release)
for version in self.get_versions(): # for each
logger.debug('version: %s', version)
tmp_revisions[version] = []
# `a_` stands for `artifact_`
for a_filename, a_uri, a_metadata in self.get_artifacts(
version):
logger.debug('a_metadata: %s', a_metadata)
revision_id = self.resolve_revision_from(
known_artifacts, a_metadata)
if revision_id is None:
with tempfile.TemporaryDirectory() as tmpdir:
try:
# a_c_: archive_computed_
a_path, a_c_metadata = download(
a_uri, dest=tmpdir)
except Exception as e:
logger.warning(
'Unable to retrieve %s. Reason: %s',
a_uri, e)
status_visit = 'partial'
continue
logger.debug('archive_path: %s', a_path)
logger.debug('archive_computed_metadata: %s',
a_c_metadata)
uncompressed_path = os.path.join(tmpdir, 'src')
uncompress(a_path, dest=uncompressed_path)
logger.debug('uncompressed_path: %s',
uncompressed_path)
directory = Directory.from_disk(
path=uncompressed_path.encode('utf-8'), data=True) # noqa
# FIXME: Try not to load the full raw content in
# memory
objects = directory.collect()
contents = objects['content'].values()
logger.debug('Number of contents: %s',
len(contents))
self.storage.content_add(
map(content_for_storage, contents))
status_load = 'eventful'
directories = objects['directory'].values()
logger.debug('Number of directories: %s',
len(directories))
self.storage.directory_add(directories)
# FIXME: This should be release. cf. D409
revision = self.build_revision(
- a_metadata, uncompressed_path)
+ a_metadata, uncompressed_path,
+ visit_date.isoformat())
revision.update({
'type': 'tar',
'synthetic': True,
'directory': directory.hash,
})
revision['metadata'].update({
'original_artifact': a_c_metadata,
- 'extrinsic': {
- 'provider': self.provider_url,
- 'when': visit_date,
- 'raw': a_metadata,
- },
})
revision['id'] = revision_id = identifier_to_bytes(
revision_identifier(revision))
logger.debug('Revision: %s', revision)
self.storage.revision_add([revision])
tmp_revisions[version].append({
'filename': a_filename,
'target': revision_id,
})
# Build and load the snapshot
branches = {}
for version, v_branches in tmp_revisions.items():
if len(v_branches) == 1:
branch_name = ('releases/%s' % version).encode('utf-8')
if version == default_release:
branches[b'HEAD'] = {
'target_type': 'alias',
'target': branch_name,
}
branches[branch_name] = {
'target_type': 'revision',
'target': v_branches[0]['target'],
}
else:
for x in v_branches:
branch_name = ('releases/%s/%s' % (
version, x['filename'])).encode('utf-8')
branches[branch_name] = {
'target_type': 'revision',
'target': x['target'],
}
snapshot = {
'branches': branches
}
snapshot['id'] = identifier_to_bytes(
snapshot_identifier(snapshot))
logger.debug('snapshot: %s', snapshot)
self.storage.snapshot_add([snapshot])
if hasattr(self.storage, 'flush'):
self.storage.flush()
except Exception as e:
logger.warning('Failed to load %s. Reason: %s', self.url, e)
status_visit = 'partial'
finally:
self.storage.origin_visit_update(
origin=self.url, visit_id=visit_id, status=status_visit,
snapshot=snapshot)
return {'status': status_load}
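# Hedged sketch (not part of this patch): a minimal, hypothetical subclass
# showing how the PackageLoader hooks above fit together. ExampleLoader, its
# URL and its single hard-coded artifact are assumptions made for
# illustration only, not an existing loader.
from swh.model.identifiers import normalize_timestamp


class ExampleLoader(PackageLoader):
    visit_type = 'example'

    def get_versions(self):
        return ['1.0.0']

    def get_default_release(self):
        return '1.0.0'

    def get_artifacts(self, version):
        # (artifact filename, artifact uri, raw artifact metadata)
        yield ('example-1.0.0.tar.gz',
               'https://example.org/example-1.0.0.tar.gz',
               {'version': version, 'date': '0'})

    def build_revision(self, a_metadata, a_uncompressed_path, visit_date):
        date = normalize_timestamp(int(a_metadata['date']))
        return {
            'message': b'synthetic revision message',
            'author': {'fullname': b'', 'name': None, 'email': None},
            'committer': {'fullname': b'', 'name': None, 'email': None},
            'date': date,
            'committer_date': date,
            'parents': [],
            'metadata': {
                'intrinsic': {},
                'extrinsic': {
                    'provider': self.url,
                    'when': visit_date,
                    'raw': a_metadata,
                },
            },
        }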
diff --git a/swh/loader/package/npm.py b/swh/loader/package/npm.py
index 8c3680f..77762d9 100644
--- a/swh/loader/package/npm.py
+++ b/swh/loader/package/npm.py
@@ -1,286 +1,292 @@
# Copyright (C) 2019 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information
import json
import logging
import os
import re
from codecs import BOM_UTF8
from typing import Generator, Dict, Tuple, Sequence, Optional
import chardet
import iso8601
from swh.model.identifiers import normalize_timestamp
from swh.loader.package.loader import PackageLoader
from swh.loader.package.utils import api_info
logger = logging.getLogger(__name__)
_EMPTY_AUTHOR = {'fullname': b'', 'name': None, 'email': None}
# https://github.com/jonschlinkert/author-regex
_author_regexp = r'([^<(]+?)?[ \t]*(?:<([^>(]+?)>)?[ \t]*(?:\(([^)]+?)\)|$)'
def parse_npm_package_author(author_str):
"""
Parse npm package author string.
It works with a flexible range of formats, as detailed below::
name
name <email> (url)
name <email>(url)
name<email> (url)
name<email>(url)
name (url) <email>
name (url)<email>
name(url) <email>
name(url)<email>
name (url)
name(url)
name <email>
name<email>
<email> (url)
<email>(url)
(url) <email>
(url)<email>
<email>
(url)
Args:
author_str (str): input author string
Returns:
dict: A dict that may contain the following keys:
* name
* email
* url
"""
author = {}
matches = re.findall(_author_regexp,
author_str.replace('<>', '').replace('()', ''),
re.M)
for match in matches:
if match[0].strip():
author['name'] = match[0].strip()
if match[1].strip():
author['email'] = match[1].strip()
if match[2].strip():
author['url'] = match[2].strip()
return author
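# Illustrative examples (not part of the patch), matching the formats
# documented above:
#   parse_npm_package_author('John Doe <john.doe@foo.bar> (https://john.doe)')
#   -> {'name': 'John Doe', 'email': 'john.doe@foo.bar', 'url': 'https://john.doe'}
#   parse_npm_package_author('<john.doe@foo.bar>')
#   -> {'email': 'john.doe@foo.bar'}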
def extract_npm_package_author(package_json):
"""
Extract package author from a ``package.json`` file content and
return it in swh format.
Args:
package_json (dict): Dict holding the content of parsed
``package.json`` file
Returns:
dict: A dict with the following keys:
* fullname
* name
* email
"""
def _author_str(author_data):
if type(author_data) is dict:
author_str = ''
if 'name' in author_data:
author_str += author_data['name']
if 'email' in author_data:
author_str += ' <%s>' % author_data['email']
return author_str
elif type(author_data) is list:
return _author_str(author_data[0]) if len(author_data) > 0 else ''
else:
return author_data
author_data = {}
for author_key in ('author', 'authors'):
if author_key in package_json:
author_str = _author_str(package_json[author_key])
author_data = parse_npm_package_author(author_str)
name = author_data.get('name')
email = author_data.get('email')
fullname = None
if name and email:
fullname = '%s <%s>' % (name, email)
elif name:
fullname = name
if not fullname:
return _EMPTY_AUTHOR
if fullname:
fullname = fullname.encode('utf-8')
if name:
name = name.encode('utf-8')
if email:
email = email.encode('utf-8')
return {'fullname': fullname, 'name': name, 'email': email}
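# Illustrative example (not part of the patch): for a package.json declaring
# "author": "Yauheni Pakala <evgeniy.pakalo@gmail.com>", this returns:
#   {'fullname': b'Yauheni Pakala <evgeniy.pakalo@gmail.com>',
#    'name': b'Yauheni Pakala',
#    'email': b'evgeniy.pakalo@gmail.com'}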
def _lstrip_bom(s, bom=BOM_UTF8):
if s.startswith(bom):
return s[len(bom):]
else:
return s
def load_json(json_bytes):
"""
Try to load JSON from bytes and return a dictionary.
First try to decode from utf-8. If the decoding failed,
try to detect the encoding and decode again with replace
error handling.
If JSON is malformed, an empty dictionary will be returned.
Args:
json_bytes (bytes): binary content of a JSON file
Returns:
dict: JSON data loaded in a dictionary
"""
json_data = {}
try:
json_str = _lstrip_bom(json_bytes).decode('utf-8')
except UnicodeDecodeError:
encoding = chardet.detect(json_bytes)['encoding']
if encoding:
json_str = json_bytes.decode(encoding, 'replace')
else:
# no encoding could be detected: give up and return the empty dict
return json_data
try:
json_data = json.loads(json_str)
except json.decoder.JSONDecodeError:
pass
return json_data
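# Illustrative examples (not part of the patch): load_json tolerates a UTF-8
# BOM and falls back to a detected encoding; malformed JSON yields {}:
#   load_json(b'\xef\xbb\xbf{"name": "pkg"}')  -> {'name': 'pkg'}
#   load_json(b'not json')                     -> {}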
def extract_intrinsic_metadata(dir_path: str) -> Dict:
"""Given an uncompressed path holding the pkginfo file, returns a
pkginfo parsed structure as a dict.
The release artifact contains at their root one folder. For example:
$ tar tvf zprint-0.0.6.tar.gz
drwxr-xr-x root/root 0 2018-08-22 11:01 zprint-0.0.6/
...
Args:
dir_path (str): Path to the uncompressed directory
representing a release artifact from npm.
Returns:
the package.json parsed content as a dict, or an empty dict if
none was present.
"""
# Retrieve the root folder of the archive
if not os.path.exists(dir_path):
return {}
lst = os.listdir(dir_path)
if len(lst) == 0:
return {}
project_dirname = lst[0]
package_json_path = os.path.join(dir_path, project_dirname, 'package.json')
if not os.path.exists(package_json_path):
return {}
with open(package_json_path, 'rb') as package_json_file:
package_json_bytes = package_json_file.read()
return load_json(package_json_bytes)
class NpmLoader(PackageLoader):
visit_type = 'npm'
def __init__(self, package_name, package_url, package_metadata_url):
super().__init__(url=package_url)
self.provider_url = package_metadata_url
self._info = None
self._versions = None
# if package_url is None:
# package_url = 'https://www.npmjs.com/package/%s' % package_name
# if package_metadata_url is None:
# package_metadata_url = 'https://replicate.npmjs.com/%s/' %\
# quote(package_name, safe='')
@property
def info(self) -> Dict:
"""Return the project metadata information (fetched from npm registry)
"""
if not self._info:
self._info = api_info(self.provider_url)
return self._info
def get_versions(self) -> Sequence[str]:
return sorted(self.info['versions'].keys())
def get_default_release(self) -> str:
return self.info['dist-tags'].get('latest', '')
def get_artifacts(self, version: str) -> Generator[
Tuple[str, str, Dict], None, None]:
meta = self.info['versions'][version]
url = meta['dist']['tarball']
filename = os.path.basename(url)
yield filename, url, meta
def resolve_revision_from(
self, known_artifacts: Dict, artifact_metadata: Dict) \
-> Optional[bytes]:
shasum = artifact_metadata['dist']['shasum']
for rev_id, known_artifact in known_artifacts.items():
if shasum == known_artifact['checksums']['sha1']:
return rev_id
def build_revision(
- self, a_metadata: Dict, a_uncompressed_path: str) -> Dict:
+ self, a_metadata: Dict, a_uncompressed_path: str,
+ visit_date: str) -> Dict:
# Parse metadata (project, artifact metadata)
i_metadata = extract_intrinsic_metadata(a_uncompressed_path)
# from intrinsic metadata
author = extract_npm_package_author(i_metadata)
# extrinsic metadata
version = i_metadata['version']
date = self.info['time'][version]
date = iso8601.parse_date(date)
date = normalize_timestamp(int(date.timestamp()))
message = version.encode('ascii')
return {
'author': author,
'date': date,
'committer': author,
'committer_date': date,
'message': message,
+ 'parents': [],
'metadata': {
'intrinsic': {
'tool': 'package.json',
'raw': i_metadata,
- }
+ },
+ 'extrinsic': {
+ 'provider': self.provider_url,
+ 'when': visit_date,
+ 'raw': a_metadata,
+ },
},
- 'parents': [],
}
diff --git a/swh/loader/package/pypi.py b/swh/loader/package/pypi.py
index c5a540b..d3d22ce 100644
--- a/swh/loader/package/pypi.py
+++ b/swh/loader/package/pypi.py
@@ -1,173 +1,179 @@
# Copyright (C) 2019 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information
import os
from typing import Generator, Dict, Tuple, Sequence, Optional
from urllib.parse import urlparse
from pkginfo import UnpackedSDist
import iso8601
from swh.model.identifiers import normalize_timestamp
from swh.loader.package.loader import PackageLoader
from swh.loader.package.utils import api_info
def pypi_api_url(url: str) -> str:
"""Compute api url from a project url
Args:
url (str): PyPI instance's url (e.g: https://pypi.org/project/requests)
This deals with correctly transforming the project's api url (e.g
https://pypi.org/pypi/requests/json)
Returns:
api url
"""
p_url = urlparse(url)
project_name = p_url.path.split('/')[-1]
url = '%s://%s/pypi/%s/json' % (p_url.scheme, p_url.netloc, project_name)
return url
def extract_intrinsic_metadata(dir_path: str) -> Dict:
"""Given an uncompressed path holding the pkginfo file, returns a
pkginfo parsed structure as a dict.
The release artifact contains a single folder at its root. For example:
$ tar tvf zprint-0.0.6.tar.gz
drwxr-xr-x root/root 0 2018-08-22 11:01 zprint-0.0.6/
...
Args:
dir_path (str): Path to the uncompressed directory
representing a release artifact from pypi.
Returns:
the pkginfo parsed structure as a dict, or an empty dict if
none was present.
"""
# Retrieve the root folder of the archive
if not os.path.exists(dir_path):
return {}
lst = os.listdir(dir_path)
if len(lst) != 1:
return {}
project_dirname = lst[0]
pkginfo_path = os.path.join(dir_path, project_dirname, 'PKG-INFO')
if not os.path.exists(pkginfo_path):
return {}
pkginfo = UnpackedSDist(pkginfo_path)
raw = pkginfo.__dict__
raw.pop('filename') # this gets added with the ondisk location
return raw
def author(data: Dict) -> Dict:
"""Given a dict of project/release artifact information (coming from
PyPI), returns an author subset.
Args:
data (dict): Representing either artifact information or
release information.
Returns:
swh-model dict representing a person.
"""
name = data.get('author')
email = data.get('author_email')
if email:
fullname = '%s <%s>' % (name, email)
else:
fullname = name
if not fullname:
return {'fullname': b'', 'name': None, 'email': None}
fullname = fullname.encode('utf-8')
if name is not None:
name = name.encode('utf-8')
if email is not None:
email = email.encode('utf-8')
return {'fullname': fullname, 'name': name, 'email': email}
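# Illustrative example (not part of the patch):
#   author({'author': 'i-am-groot', 'author_email': 'iam@groot.org'})
#   -> {'fullname': b'i-am-groot <iam@groot.org>',
#       'name': b'i-am-groot', 'email': b'iam@groot.org'}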
class PyPILoader(PackageLoader):
"""Load pypi origin's artifact releases into swh archive.
"""
visit_type = 'pypi'
def __init__(self, url):
super().__init__(url=url)
self._info = None
self.provider_url = pypi_api_url(self.url)
@property
def info(self) -> Dict:
"""Return the project metadata information (fetched from pypi registry)
"""
if not self._info:
self._info = api_info(self.provider_url)
return self._info
def get_versions(self) -> Sequence[str]:
return self.info['releases'].keys()
def get_default_release(self) -> str:
return self.info['info']['version']
def get_artifacts(self, version: str) -> Generator[
Tuple[str, str, Dict], None, None]:
for meta in self.info['releases'][version]:
yield meta['filename'], meta['url'], meta
def resolve_revision_from(
self, known_artifacts: Dict, artifact_metadata: Dict) \
-> Optional[bytes]:
sha256 = artifact_metadata['digests']['sha256']
for rev_id, known_artifact in known_artifacts.items():
if sha256 == known_artifact['checksums']['sha256']:
return rev_id
def build_revision(
- self, a_metadata: Dict, a_uncompressed_path: str) -> Dict:
+ self, a_metadata: Dict, a_uncompressed_path: str,
+ visit_date: str) -> Dict:
# Parse metadata (project, artifact metadata)
metadata = extract_intrinsic_metadata(a_uncompressed_path)
# from intrinsic metadata
name = metadata['version']
_author = author(metadata)
# from extrinsic metadata
message = a_metadata.get('comment_text', '')
message = '%s: %s' % (name, message) if message else name
date = normalize_timestamp(
int(iso8601.parse_date(a_metadata['upload_time']).timestamp()))
return {
'message': message.encode('utf-8'),
'author': _author,
'date': date,
'committer': _author,
'committer_date': date,
'parents': [],
'metadata': {
'intrinsic': {
'tool': 'PKG-INFO',
'raw': metadata,
- }
+ },
+ 'extrinsic': {
+ 'provider': self.provider_url,
+ 'when': visit_date,
+ 'raw': a_metadata,
+ },
}
}
diff --git a/swh/loader/package/tests/common.py b/swh/loader/package/tests/common.py
index 81d105b..a46cd58 100644
--- a/swh/loader/package/tests/common.py
+++ b/swh/loader/package/tests/common.py
@@ -1,57 +1,101 @@
# Copyright (C) 2019 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information
from os import path
import logging
+from typing import Dict, List, Tuple
+
from swh.model.hashutil import hash_to_bytes, hash_to_hex
logger = logging.getLogger(__file__)
+
DATADIR = path.join(path.abspath(path.dirname(__file__)), 'resources')
def decode_target(target):
"""Test helper to ease readability in test
"""
if not target:
return target
target_type = target['target_type']
if target_type == 'alias':
decoded_target = target['target'].decode('utf-8')
else:
decoded_target = hash_to_hex(target['target'])
return {
'target': decoded_target,
'target_type': target_type
}
def check_snapshot(expected_snapshot, storage):
"""Check for snapshot match.
Provide the hashes as hexadecimal; the conversion is done
within the method.
Args:
expected_snapshot (dict): full snapshot with hex ids
storage (Storage): storage holding the snapshot to check
"""
expected_snapshot_id = expected_snapshot['id']
expected_branches = expected_snapshot['branches']
snap = storage.snapshot_get(hash_to_bytes(expected_snapshot_id))
assert snap is not None
branches = {
branch.decode('utf-8'): decode_target(target)
for branch, target in snap['branches'].items()
}
assert expected_branches == branches
+
+
+def check_metadata(metadata: Dict, key_path: str, raw_type: str):
+ """Given a metadata dict, ensure the associated key_path value is of type
+ raw_type.
+
+ Args:
+ metadata: Dict to check
+ key_path: Path to check
+ raw_type: Type to check the path with
+
+ Raises:
+ AssertionError in case of mismatch
+
+ """
+ data = metadata
+ keys = key_path.split('.')
+ for k in keys:
+ try:
+ data = data[k]
+ except (TypeError, KeyError) as e:
+ # KeyError: because path too long
+ # TypeError: data is not a dict
+ raise AssertionError(e)
+ assert isinstance(data, raw_type)
+
+
+def check_metadata_paths(metadata: Dict, paths: List[Tuple[str, str]]):
+ """Given a metadata dict, ensure the keys are of expected types
+
+ Args:
+ metadata: Dict to check
+ paths: List of (key_path, raw_type) tuples to check
+
+ Raises:
+ AssertionError in case of mismatch
+
+ """
+ for key_path, raw_type in paths:
+ check_metadata(metadata, key_path, raw_type)
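# Illustrative usage (not part of the patch), mirroring the revision metadata
# checks done in the loader tests:
#   check_metadata_paths(revision['metadata'], paths=[
#       ('extrinsic.provider', str),
#       ('extrinsic.when', str),
#       ('extrinsic.raw', dict),
#   ])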
diff --git a/swh/loader/package/tests/test_common.py b/swh/loader/package/tests/test_common.py
index 3f49c05..95a5a9a 100644
--- a/swh/loader/package/tests/test_common.py
+++ b/swh/loader/package/tests/test_common.py
@@ -1,101 +1,175 @@
# Copyright (C) 2019 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information
import pytest
from swh.model.hashutil import hash_to_bytes
-from swh.loader.package.tests.common import decode_target, check_snapshot
+from swh.loader.package.tests.common import (
+ decode_target, check_snapshot, check_metadata, check_metadata_paths
+)
from swh.storage import get_storage
hash_hex = '43e45d56f88993aae6a0198013efa80716fd8920'
def test_decode_target_edge():
assert not decode_target(None)
def test_decode_target():
actual_alias_decode_target = decode_target({
'target_type': 'alias',
'target': b'something',
})
assert actual_alias_decode_target == {
'target_type': 'alias',
'target': 'something',
}
actual_decode_target = decode_target({
'target_type': 'revision',
'target': hash_to_bytes(hash_hex),
})
assert actual_decode_target == {
'target_type': 'revision',
'target': hash_hex,
}
def test_check_snapshot():
storage = get_storage(cls='memory', args={})
snap_id = '2498dbf535f882bc7f9a18fb16c9ad27fda7bab7'
snapshot = {
'id': hash_to_bytes(snap_id),
'branches': {
b'master': {
'target': hash_to_bytes(hash_hex),
'target_type': 'revision',
},
},
}
s = storage.snapshot_add([snapshot])
assert s == {
'snapshot:add': 1,
}
expected_snapshot = {
'id': snap_id,
'branches': {
'master': {
'target': hash_hex,
'target_type': 'revision',
}
}
}
check_snapshot(expected_snapshot, storage)
def test_check_snapshot_failure():
storage = get_storage(cls='memory', args={})
snapshot = {
'id': hash_to_bytes('2498dbf535f882bc7f9a18fb16c9ad27fda7bab7'),
'branches': {
b'master': {
'target': hash_to_bytes(hash_hex),
'target_type': 'revision',
},
},
}
s = storage.snapshot_add([snapshot])
assert s == {
'snapshot:add': 1,
}
unexpected_snapshot = {
'id': '2498dbf535f882bc7f9a18fb16c9ad27fda7bab7',
'branches': {
'master': {
'target': hash_hex,
'target_type': 'release', # wrong value
}
}
}
with pytest.raises(AssertionError):
check_snapshot(unexpected_snapshot, storage)
+
+
+def test_check_metadata():
+ metadata = {
+ 'a': {
+ 'raw': {
+ 'time': 'something',
+ },
+ },
+ 'b': [],
+ 'c': 1,
+ }
+
+ for raw_path, raw_type in [
+ ('a.raw', dict),
+ ('a.raw.time', str),
+ ('b', list),
+ ('c', int),
+ ]:
+ check_metadata(metadata, raw_path, raw_type)
+
+
+def test_check_metadata_ko():
+ metadata = {
+ 'a': {
+ 'raw': 'hello',
+ },
+ 'b': [],
+ 'c': 1,
+ }
+
+ for raw_path, raw_type in [
+ ('a.b', dict),
+ ('a.raw.time', str),
+ ]:
+ with pytest.raises(AssertionError):
+ check_metadata(metadata, raw_path, raw_type)
+
+
+def test_check_metadata_paths():
+ metadata = {
+ 'a': {
+ 'raw': {
+ 'time': 'something',
+ },
+ },
+ 'b': [],
+ 'c': 1,
+ }
+
+ check_metadata_paths(metadata, [
+ ('a.raw', dict),
+ ('a.raw.time', str),
+ ('b', list),
+ ('c', int),
+ ])
+
+
+def test_check_metadata_paths_ko():
+ metadata = {
+ 'a': {
+ 'raw': 'hello',
+ },
+ 'b': [],
+ 'c': 1,
+ }
+
+ with pytest.raises(AssertionError):
+ check_metadata_paths(metadata, [
+ ('a.b', dict),
+ ('a.raw.time', str),
+ ])
diff --git a/swh/loader/package/tests/test_gnu.py b/swh/loader/package/tests/test_gnu.py
index 55af941..181e35c 100644
--- a/swh/loader/package/tests/test_gnu.py
+++ b/swh/loader/package/tests/test_gnu.py
@@ -1,214 +1,251 @@
# Copyright (C) 2019 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information
import os
import re
from swh.model.hashutil import hash_to_bytes
from swh.loader.package.gnu import GNULoader, get_version
-from swh.loader.package.tests.common import check_snapshot
+from swh.loader.package.tests.common import (
+ check_snapshot, check_metadata_paths
+)
def test_get_version():
"""From url to branch name should yield something relevant
"""
for url, expected_branchname in [
('https://gnu.org/sthg/info-2.1.0.tar.gz', '2.1.0'),
('https://gnu.org/sthg/info-2.1.2.zip', '2.1.2'),
('https://sthg.org/gnu/sthg.tar.gz', 'sthg'),
('https://sthg.org/gnu/DLDF-1.1.4.tar.gz', '1.1.4'),
('https://sthg.org/gnu/anubis-latest.tar.bz2', 'latest'),
('https://ftp.org/gnu/aris-w32.zip', 'w32'),
('https://ftp.org/gnu/aris-w32-2.2.zip', 'w32-2.2'),
('https://ftp.org/gnu/autogen.info.tar.gz', 'autogen.info'),
('https://ftp.org/gnu/crypto-build-demo.tar.gz',
'crypto-build-demo'),
('https://ftp.org/gnu/clue+clio+xit.clisp.tar.gz',
'clue+clio+xit.clisp'),
('https://ftp.org/gnu/clue+clio.for-pcl.tar.gz',
'clue+clio.for-pcl'),
('https://ftp.org/gnu/clisp-hppa2.0-hp-hpux10.20.tar.gz',
'hppa2.0-hp-hpux10.20'),
('clisp-i386-solaris2.6.tar.gz', 'i386-solaris2.6'),
('clisp-mips-sgi-irix6.5.tar.gz', 'mips-sgi-irix6.5'),
('clisp-powerpc-apple-macos.tar.gz', 'powerpc-apple-macos'),
('clisp-powerpc-unknown-linuxlibc6.tar.gz',
'powerpc-unknown-linuxlibc6'),
('clisp-rs6000-ibm-aix3.2.5.tar.gz', 'rs6000-ibm-aix3.2.5'),
('clisp-sparc-redhat51-linux.tar.gz', 'sparc-redhat51-linux'),
('clisp-sparc-sun-solaris2.4.tar.gz', 'sparc-sun-solaris2.4'),
('clisp-sparc-sun-sunos4.1.3_U1.tar.gz',
'sparc-sun-sunos4.1.3_U1'),
('clisp-2.25.1-powerpc-apple-MacOSX.tar.gz',
'2.25.1-powerpc-apple-MacOSX'),
('clisp-2.27-PowerMacintosh-powerpc-Darwin-1.3.7.tar.gz',
'2.27-PowerMacintosh-powerpc-Darwin-1.3.7'),
('clisp-2.27-i686-unknown-Linux-2.2.19.tar.gz',
'2.27-i686-unknown-Linux-2.2.19'),
('clisp-2.28-i386-i386-freebsd-4.3-RELEASE.tar.gz',
'2.28-i386-i386-freebsd-4.3-RELEASE'),
('clisp-2.28-i686-unknown-cygwin_me-4.90-1.3.10.tar.gz',
'2.28-i686-unknown-cygwin_me-4.90-1.3.10'),
('clisp-2.29-i386-i386-freebsd-4.6-STABLE.tar.gz',
'2.29-i386-i386-freebsd-4.6-STABLE'),
('clisp-2.29-i686-unknown-cygwin_nt-5.0-1.3.12.tar.gz',
'2.29-i686-unknown-cygwin_nt-5.0-1.3.12'),
('gcl-2.5.3-ansi-japi-xdr.20030701_mingw32.zip',
'2.5.3-ansi-japi-xdr.20030701_mingw32'),
('gettext-runtime-0.13.1.bin.woe32.zip', '0.13.1.bin.woe32'),
('sather-logo_images.tar.gz', 'sather-logo_images'),
('sather-specification-000328.html.tar.gz', '000328.html')
]:
actual_branchname = get_version(url)
assert actual_branchname == expected_branchname
_expected_new_contents_first_visit = [
'e9258d81faf5881a2f96a77ba609396f82cb97ad',
'1170cf105b04b7e2822a0e09d2acf71da7b9a130',
'fbd27c3f41f2668624ffc80b7ba5db9b92ff27ac',
'0057bec9b5422aff9256af240b177ac0e3ac2608',
'2b8d0d0b43a1078fc708930c8ddc2956a86c566e',
'27de3b3bc6545d2a797aeeb4657c0e215a0c2e55',
'2e6db43f5cd764e677f416ff0d0c78c7a82ef19b',
'ae9be03bd2a06ed8f4f118d3fe76330bb1d77f62',
'edeb33282b2bffa0e608e9d2fd960fd08093c0ea',
'd64e64d4c73679323f8d4cde2643331ba6c20af9',
'7a756602914be889c0a2d3952c710144b3e64cb0',
'84fb589b554fcb7f32b806951dcf19518d67b08f',
'8624bcdae55baeef00cd11d5dfcfa60f68710a02',
'e08441aeab02704cfbd435d6445f7c072f8f524e',
'f67935bc3a83a67259cda4b2d43373bd56703844',
'809788434b433eb2e3cfabd5d591c9a659d5e3d8',
'7d7c6c8c5ebaeff879f61f37083a3854184f6c41',
'b99fec102eb24bffd53ab61fc30d59e810f116a2',
'7d149b28eaa228b3871c91f0d5a95a2fa7cb0c68',
'f0c97052e567948adf03e641301e9983c478ccff',
'7fb724242e2b62b85ca64190c31dcae5303e19b3',
'4f9709e64a9134fe8aefb36fd827b84d8b617ab5',
'7350628ccf194c2c3afba4ac588c33e3f3ac778d',
'0bb892d9391aa706dc2c3b1906567df43cbe06a2',
'49d4c0ce1a16601f1e265d446b6c5ea6b512f27c',
'6b5cc594ac466351450f7f64a0b79fdaf4435ad3',
'3046e5d1f70297e2a507b98224b6222c9688d610',
'1572607d456d7f633bc6065a2b3048496d679a31',
]
_expected_new_directories_first_visit = [
'daabc65ec75d487b1335ffc101c0ac11c803f8fc',
'263be23b4a8101d3ad0d9831319a3e0f2b065f36',
'7f6e63ba6eb3e2236f65892cd822041f1a01dd5c',
'4db0a3ecbc976083e2dac01a62f93729698429a3',
'dfef1c80e1098dd5deda664bb44a9ab1f738af13',
'eca971d346ea54d95a6e19d5051f900237fafdaa',
'3aebc29ed1fccc4a6f2f2010fb8e57882406b528',
]
_expected_new_revisions_first_visit = {
'44183488c0774ce3c957fa19ba695cf18a4a42b3':
'3aebc29ed1fccc4a6f2f2010fb8e57882406b528'
}
_expected_branches_first_visit = {
'HEAD': {
'target_type': 'alias',
'target': 'releases/0.1.0',
},
'releases/0.1.0': {
'target_type': 'revision',
'target': '44183488c0774ce3c957fa19ba695cf18a4a42b3',
},
}
# hash is different than before as we changed the snapshot
# gnu used to use `release/` (singular) instead of plural
_expected_new_snapshot_first_visit_id = 'c419397fd912039825ebdbea378bc6283f006bf5' # noqa
def test_release_artifact_not_found(swh_config, requests_mock):
package = '8sync'
package_url = 'https://ftp.gnu.org/gnu/8sync/'
tarballs = [{
'date': '944729610',
'archive': 'https://ftp.gnu.org/gnu/8sync/8sync-0.1.0.tar.gz',
}]
loader = GNULoader(package, package_url, tarballs)
requests_mock.get(re.compile('https://'), status_code=404)
actual_load_status = loader.load()
assert actual_load_status == {'status': 'uneventful'}
stats = loader.storage.stat_counters()
assert {
'content': 0,
'directory': 0,
'origin': 1,
'origin_visit': 1,
'person': 0,
'release': 0,
'revision': 0,
'skipped_content': 0,
'snapshot': 1,
} == stats
origin_visit = next(loader.storage.origin_visit_get(package_url))
assert origin_visit['status'] == 'partial'
+def test_revision_metadata_structure(swh_config, local_get):
+ package = '8sync'
+ package_url = 'https://ftp.gnu.org/gnu/8sync/'
+ tarballs = [{
+ 'date': '944729610',
+ 'archive': 'https://ftp.gnu.org/gnu/8sync/8sync-0.1.0.tar.gz',
+ }]
+
+ loader = GNULoader(package, package_url, tarballs)
+
+ actual_load_status = loader.load()
+ assert actual_load_status == {'status': 'eventful'}
+
+ expected_revision_id = hash_to_bytes(
+ '44183488c0774ce3c957fa19ba695cf18a4a42b3')
+ revision = list(loader.storage.revision_get([expected_revision_id]))[0]
+
+ assert revision is not None
+
+ assert isinstance(revision['metadata'], dict)
+ assert isinstance(revision['metadata']['intrinsic'], dict)
+ assert isinstance(revision['metadata']['extrinsic'], dict)
+ assert isinstance(revision['metadata']['original_artifact'], dict)
+
+ check_metadata_paths(revision['metadata'], paths=[
+ ('intrinsic', dict),
+ ('extrinsic.provider', str),
+ ('extrinsic.when', str),
+ ('extrinsic.raw', dict),
+ ('original_artifact.filename', str),
+ ('original_artifact.length', int),
+ ('original_artifact.checksums', dict),
+ ])
+
+
def test_release_artifact_no_prior_visit(swh_config, local_get):
"""With no prior visit, load a gnu project ends up with 1 snapshot
"""
assert 'SWH_CONFIG_FILENAME' in os.environ # cf. tox.ini
package = '8sync'
package_url = 'https://ftp.gnu.org/gnu/8sync/'
tarballs = [{
'date': '944729610',
'archive': 'https://ftp.gnu.org/gnu/8sync/8sync-0.1.0.tar.gz',
}]
loader = GNULoader(package, package_url, tarballs)
actual_load_status = loader.load()
assert actual_load_status == {'status': 'eventful'}
stats = loader.storage.stat_counters()
assert {
'content': len(_expected_new_contents_first_visit),
'directory': len(_expected_new_directories_first_visit),
'origin': 1,
'origin_visit': 1,
'person': 1,
'release': 0,
'revision': len(_expected_new_revisions_first_visit),
'skipped_content': 0,
'snapshot': 1
} == stats
expected_contents = map(hash_to_bytes, _expected_new_contents_first_visit)
assert list(loader.storage.content_missing_per_sha1(expected_contents)) \
== []
expected_dirs = map(hash_to_bytes, _expected_new_directories_first_visit)
assert list(loader.storage.directory_missing(expected_dirs)) == []
expected_revs = map(hash_to_bytes, _expected_new_revisions_first_visit)
assert list(loader.storage.revision_missing(expected_revs)) == []
expected_snapshot = {
'id': _expected_new_snapshot_first_visit_id,
'branches': _expected_branches_first_visit,
}
check_snapshot(expected_snapshot, loader.storage)
diff --git a/swh/loader/package/tests/test_npm.py b/swh/loader/package/tests/test_npm.py
index d1b95cf..6ccf13f 100644
--- a/swh/loader/package/tests/test_npm.py
+++ b/swh/loader/package/tests/test_npm.py
@@ -1,496 +1,530 @@
# Copyright (C) 2019 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information
import json
import os
from swh.model.hashutil import hash_to_bytes
from swh.loader.package.npm import (
parse_npm_package_author, extract_npm_package_author
)
-from swh.loader.package.tests.common import DATADIR, check_snapshot
+from swh.loader.package.tests.common import (
+ DATADIR, check_snapshot, check_metadata_paths
+)
from swh.loader.package.npm import NpmLoader
def _parse_author_string_test(author_str, expected_result):
assert parse_npm_package_author(author_str) == expected_result
assert parse_npm_package_author(' %s' % author_str) == expected_result
assert parse_npm_package_author('%s ' % author_str) == expected_result
def test_parse_npm_package_author():
_parse_author_string_test(
'John Doe',
{
'name': 'John Doe'
}
)
_parse_author_string_test(
'<john.doe@foo.bar>',
{
'email': 'john.doe@foo.bar'
}
)
_parse_author_string_test(
'(https://john.doe)',
{
'url': 'https://john.doe'
}
)
_parse_author_string_test(
'John Doe <john.doe@foo.bar>',
{
'name': 'John Doe',
'email': 'john.doe@foo.bar'
}
)
_parse_author_string_test(
'John Doe<john.doe@foo.bar>',
{
'name': 'John Doe',
'email': 'john.doe@foo.bar'
}
)
_parse_author_string_test(
'John Doe (https://john.doe)',
{
'name': 'John Doe',
'url': 'https://john.doe'
}
)
_parse_author_string_test(
'John Doe(https://john.doe)',
{
'name': 'John Doe',
'url': 'https://john.doe'
}
)
_parse_author_string_test(
'<john.doe@foo.bar> (https://john.doe)',
{
'email': 'john.doe@foo.bar',
'url': 'https://john.doe'
}
)
_parse_author_string_test(
'(https://john.doe) <john.doe@foo.bar>',
{
'email': 'john.doe@foo.bar',
'url': 'https://john.doe'
}
)
_parse_author_string_test(
'John Doe <john.doe@foo.bar> (https://john.doe)',
{
'name': 'John Doe',
'email': 'john.doe@foo.bar',
'url': 'https://john.doe'
}
)
_parse_author_string_test(
'John Doe (https://john.doe) <john.doe@foo.bar>',
{
'name': 'John Doe',
'email': 'john.doe@foo.bar',
'url': 'https://john.doe'
}
)
_parse_author_string_test(
'John Doe<john.doe@foo.bar> (https://john.doe)',
{
'name': 'John Doe',
'email': 'john.doe@foo.bar',
'url': 'https://john.doe'
}
)
_parse_author_string_test(
'John Doe<john.doe@foo.bar>(https://john.doe)',
{
'name': 'John Doe',
'email': 'john.doe@foo.bar',
'url': 'https://john.doe'
}
)
_parse_author_string_test('', {})
_parse_author_string_test('<>', {})
_parse_author_string_test(' <>', {})
_parse_author_string_test('<>()', {})
_parse_author_string_test('<> ()', {})
_parse_author_string_test('()', {})
_parse_author_string_test(' ()', {})
_parse_author_string_test(
'John Doe <> ()',
{
'name': 'John Doe'
}
)
_parse_author_string_test(
'John Doe <>',
{
'name': 'John Doe'
}
)
_parse_author_string_test(
'John Doe ()',
{
'name': 'John Doe'
}
)
def test_extract_npm_package_author():
package_metadata_filepath = os.path.join(
DATADIR, 'replicate.npmjs.com', 'org_visit1')
with open(package_metadata_filepath) as json_file:
package_metadata = json.load(json_file)
assert extract_npm_package_author(package_metadata['versions']['0.0.2']) == \
{
'fullname': b'mooz <stillpedant@gmail.com>',
'name': b'mooz',
'email': b'stillpedant@gmail.com'
}
assert (
extract_npm_package_author(package_metadata['versions']['0.0.3']) ==
{
'fullname': b'Masafumi Oyamada <stillpedant@gmail.com>',
'name': b'Masafumi Oyamada',
'email': b'stillpedant@gmail.com'
}
)
package_json = json.loads('''
{
"name": "highlightjs-line-numbers.js",
"version": "2.7.0",
"description": "Highlight.js line numbers plugin.",
"main": "src/highlightjs-line-numbers.js",
"dependencies": {},
"devDependencies": {
"gulp": "^4.0.0",
"gulp-rename": "^1.4.0",
"gulp-replace": "^0.6.1",
"gulp-uglify": "^1.2.0"
},
"repository": {
"type": "git",
"url": "https://github.com/wcoder/highlightjs-line-numbers.js.git"
},
"author": "Yauheni Pakala <evgeniy.pakalo@gmail.com>",
"license": "MIT",
"bugs": {
"url": "https://github.com/wcoder/highlightjs-line-numbers.js/issues"
},
"homepage": "http://wcoder.github.io/highlightjs-line-numbers.js/"
}''') # noqa
assert extract_npm_package_author(package_json) == \
{
'fullname': b'Yauheni Pakala <evgeniy.pakalo@gmail.com>',
'name': b'Yauheni Pakala',
'email': b'evgeniy.pakalo@gmail.com'
}
package_json = json.loads('''
{
"name": "3-way-diff",
"version": "0.0.1",
"description": "3-way diffing of JavaScript objects",
"main": "index.js",
"authors": [
{
"name": "Shawn Walsh",
"url": "https://github.com/shawnpwalsh"
},
{
"name": "Markham F Rollins IV",
"url": "https://github.com/mrollinsiv"
}
],
"keywords": [
"3-way diff",
"3 way diff",
"three-way diff",
"three way diff"
],
"devDependencies": {
"babel-core": "^6.20.0",
"babel-preset-es2015": "^6.18.0",
"mocha": "^3.0.2"
},
"dependencies": {
"lodash": "^4.15.0"
}
}''')
assert extract_npm_package_author(package_json) == \
{
'fullname': b'Shawn Walsh',
'name': b'Shawn Walsh',
'email': None
}
package_json = json.loads('''
{
"name": "yfe-ynpm",
"version": "1.0.0",
"homepage": "http://gitlab.ywwl.com/yfe/yfe-ynpm",
"repository": {
"type": "git",
"url": "git@gitlab.ywwl.com:yfe/yfe-ynpm.git"
},
"author": [
"fengmk2 <fengmk2@gmail.com> (https://fengmk2.com)",
"xufuzi <xufuzi@ywwl.com> (https://7993.org)"
],
"license": "MIT"
}''')
assert extract_npm_package_author(package_json) == \
{
'fullname': b'fengmk2 <fengmk2@gmail.com>',
'name': b'fengmk2',
'email': b'fengmk2@gmail.com'
}
package_json = json.loads('''
{
"name": "umi-plugin-whale",
"version": "0.0.8",
"description": "Internal contract component",
"authors": {
"name": "xiaohuoni",
"email": "448627663@qq.com"
},
"repository": "alitajs/whale",
"devDependencies": {
"np": "^3.0.4",
"umi-tools": "*"
},
"license": "MIT"
}''')
assert extract_npm_package_author(package_json) == \
{
'fullname': b'xiaohuoni <448627663@qq.com>',
'name': b'xiaohuoni',
'email': b'448627663@qq.com'
}
def normalize_hashes(hashes):
if isinstance(hashes, str):
return hash_to_bytes(hashes)
if isinstance(hashes, list):
return [hash_to_bytes(x) for x in hashes]
return {hash_to_bytes(k): hash_to_bytes(v) for k, v in hashes.items()}
_expected_new_contents_first_visit = normalize_hashes([
'4ce3058e16ab3d7e077f65aabf855c34895bf17c',
'858c3ceee84c8311adc808f8cdb30d233ddc9d18',
'0fa33b4f5a4e0496da6843a38ff1af8b61541996',
'85a410f8ef8eb8920f2c384a9555566ad4a2e21b',
'9163ac8025923d5a45aaac482262893955c9b37b',
'692cf623b8dd2c5df2c2998fd95ae4ec99882fb4',
'18c03aac6d3e910efb20039c15d70ab5e0297101',
'41265c42446aac17ca769e67d1704f99e5a1394d',
'783ff33f5882813dca9239452c4a7cadd4dba778',
'b029cfb85107aee4590c2434a3329bfcf36f8fa1',
'112d1900b4c2e3e9351050d1b542c9744f9793f3',
'5439bbc4bd9a996f1a38244e6892b71850bc98fd',
'd83097a2f994b503185adf4e719d154123150159',
'd0939b4898e83090ee55fd9d8a60e312cfadfbaf',
'b3523a26f7147e4af40d9d462adaae6d49eda13e',
'cd065fb435d6fb204a8871bcd623d0d0e673088c',
'2854a40855ad839a54f4b08f5cff0cf52fca4399',
'b8a53bbaac34ebb8c6169d11a4b9f13b05c583fe',
'0f73d56e1cf480bded8a1ecf20ec6fc53c574713',
'0d9882b2dfafdce31f4e77fe307d41a44a74cefe',
'585fc5caab9ead178a327d3660d35851db713df1',
'e8cd41a48d79101977e3036a87aeb1aac730686f',
'5414efaef33cceb9f3c9eb5c4cc1682cd62d14f7',
'9c3cc2763bf9e9e37067d3607302c4776502df98',
'3649a68410e354c83cd4a38b66bd314de4c8f5c9',
'e96ed0c091de1ebdf587104eaf63400d1974a1fe',
'078ca03d2f99e4e6eab16f7b75fbb7afb699c86c',
'38de737da99514de6559ff163c988198bc91367a',
])
_expected_new_directories_first_visit = normalize_hashes([
'3370d20d6f96dc1c9e50f083e2134881db110f4f',
'42753c0c2ab00c4501b552ac4671c68f3cf5aece',
'd7895533ef5edbcffdea3f057d9fef3a1ef845ce',
'80579be563e2ef3e385226fe7a3f079b377f142c',
'3b0ddc6a9e58b4b53c222da4e27b280b6cda591c',
'bcad03ce58ac136f26f000990fc9064e559fe1c0',
'5fc7e82a1bc72e074665c6078c6d3fad2f13d7ca',
'e3cd26beba9b1e02f6762ef54bd9ac80cc5f25fd',
'584b5b4b6cf7f038095e820b99386a9c232de931',
'184c8d6d0d242f2b1792ef9d3bf396a5434b7f7a',
'bb5f4ee143c970367eb409f2e4c1104898048b9d',
'1b95491047add1103db0dfdfa84a9735dcb11e88',
'a00c6de13471a2d66e64aca140ddb21ef5521e62',
'5ce6c1cd5cda2d546db513aaad8c72a44c7771e2',
'c337091e349b6ac10d38a49cdf8c2401ef9bb0f2',
'202fafcd7c0f8230e89d5496ad7f44ab12b807bf',
'775cc516543be86c15c1dc172f49c0d4e6e78235',
'ff3d1ead85a14f891e8b3fa3a89de39db1b8de2e',
])
_expected_new_revisions_first_visit = normalize_hashes({
'd8a1c7474d2956ac598a19f0f27d52f7015f117e':
'42753c0c2ab00c4501b552ac4671c68f3cf5aece',
'5f9eb78af37ffd12949f235e86fac04898f9f72a':
'3370d20d6f96dc1c9e50f083e2134881db110f4f',
'ba019b192bdb94bd0b5bd68b3a5f92b5acc2239a':
'd7895533ef5edbcffdea3f057d9fef3a1ef845ce'}
)
_expected_new_snapshot_first_visit_id = normalize_hashes(
'd0587e1195aed5a8800411a008f2f2d627f18e2d')
_expected_branches_first_visit = {
'HEAD': {
'target': 'releases/0.0.4',
'target_type': 'alias'
},
'releases/0.0.2': {
'target': 'd8a1c7474d2956ac598a19f0f27d52f7015f117e',
'target_type': 'revision'
},
'releases/0.0.3': {
'target': '5f9eb78af37ffd12949f235e86fac04898f9f72a',
'target_type': 'revision'
},
'releases/0.0.4': {
'target': 'ba019b192bdb94bd0b5bd68b3a5f92b5acc2239a',
'target_type': 'revision'
}
}
def package_url(package):
return 'https://www.npmjs.com/package/%s' % package
def package_metadata_url(package):
return 'https://replicate.npmjs.com/%s/' % package
+def test_revision_metadata_structure(swh_config, local_get):
+ package = 'org'
+ loader = NpmLoader(package,
+ package_url(package),
+ package_metadata_url(package))
+
+ actual_load_status = loader.load()
+ assert actual_load_status == {'status': 'eventful'}
+
+ expected_revision_id = hash_to_bytes(
+ 'd8a1c7474d2956ac598a19f0f27d52f7015f117e')
+ revision = list(loader.storage.revision_get([expected_revision_id]))[0]
+
+ assert revision is not None
+
+ assert isinstance(revision['metadata'], dict)
+ assert isinstance(revision['metadata']['intrinsic'], dict)
+ assert isinstance(revision['metadata']['extrinsic'], dict)
+ assert isinstance(revision['metadata']['original_artifact'], dict)
+
+ check_metadata_paths(revision['metadata'], paths=[
+ ('intrinsic.tool', str),
+ ('intrinsic.raw', dict),
+ ('extrinsic.provider', str),
+ ('extrinsic.when', str),
+ ('extrinsic.raw', dict),
+ ('original_artifact.filename', str),
+ ('original_artifact.length', int),
+ ('original_artifact.checksums', dict),
+ ])
+
+
def test_npm_loader_first_visit(swh_config, local_get):
package = 'org'
loader = NpmLoader(package,
package_url(package),
package_metadata_url(package))
actual_load_status = loader.load()
assert actual_load_status == {'status': 'eventful'}
stats = loader.storage.stat_counters()
assert {
'content': len(_expected_new_contents_first_visit),
'directory': len(_expected_new_directories_first_visit),
'origin': 1,
'origin_visit': 1,
'person': 2,
'release': 0,
'revision': len(_expected_new_revisions_first_visit),
'skipped_content': 0,
'snapshot': 1,
} == stats
assert len(list(loader.storage.content_get(
_expected_new_contents_first_visit))) == len(
_expected_new_contents_first_visit)
assert list(loader.storage.directory_missing(
_expected_new_directories_first_visit)) == []
assert list(loader.storage.revision_missing(
_expected_new_revisions_first_visit)) == []
expected_snapshot = {
'id': _expected_new_snapshot_first_visit_id,
'branches': _expected_branches_first_visit,
}
check_snapshot(expected_snapshot, loader.storage)
def test_npm_loader_incremental_visit(swh_config, local_get_visits):
package = 'org'
url = package_url(package)
metadata_url = package_metadata_url(package)
loader = NpmLoader(package, url, metadata_url)
actual_load_status = loader.load()
assert actual_load_status == {'status': 'eventful'}
origin_visit = list(loader.storage.origin_visit_get(url))[-1]
assert origin_visit['status'] == 'full'
stats = loader.storage.stat_counters()
assert {
'content': len(_expected_new_contents_first_visit),
'directory': len(_expected_new_directories_first_visit),
'origin': 1,
'origin_visit': 1,
'person': 2,
'release': 0,
'revision': len(_expected_new_revisions_first_visit),
'skipped_content': 0,
'snapshot': 1,
} == stats
loader._info = None # reset loader internal state
actual_load_status2 = loader.load()
assert actual_load_status2 == {'status': 'eventful'}
origin_visit2 = list(loader.storage.origin_visit_get(url))[-1]
assert origin_visit2['status'] == 'full'
stats = loader.storage.stat_counters()
assert { # 3 new release artifacts
'content': len(_expected_new_contents_first_visit) + 14,
'directory': len(_expected_new_directories_first_visit) + 15,
'origin': 1,
'origin_visit': 2,
'person': 2,
'release': 0,
'revision': len(_expected_new_revisions_first_visit) + 3,
'skipped_content': 0,
'snapshot': 2,
} == stats
urls = [
m.url for m in local_get_visits.request_history
if m.url.startswith('https://registry.npmjs.org')
]
assert len(urls) == len(set(urls)) # we visited each artifact only once across both visits
diff --git a/swh/loader/package/tests/test_pypi.py b/swh/loader/package/tests/test_pypi.py
index e625690..2b3b6b1 100644
--- a/swh/loader/package/tests/test_pypi.py
+++ b/swh/loader/package/tests/test_pypi.py
@@ -1,541 +1,571 @@
# Copyright (C) 2019 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information
import os
from os import path
import pytest
from unittest.mock import patch
from swh.core.tarball import uncompress
from swh.model.hashutil import hash_to_bytes
from swh.loader.package.pypi import (
PyPILoader, pypi_api_url, author, extract_intrinsic_metadata
)
from swh.loader.package.tests.common import (
- check_snapshot, DATADIR
+ check_snapshot, DATADIR, check_metadata_paths
)
from swh.loader.package.tests.conftest import local_get_factory
def test_author_basic():
data = {
'author': "i-am-groot",
'author_email': 'iam@groot.org',
}
actual_author = author(data)
expected_author = {
'fullname': b'i-am-groot <iam@groot.org>',
'name': b'i-am-groot',
'email': b'iam@groot.org',
}
assert actual_author == expected_author
def test_author_empty_email():
data = {
'author': 'i-am-groot',
'author_email': '',
}
actual_author = author(data)
expected_author = {
'fullname': b'i-am-groot',
'name': b'i-am-groot',
'email': b'',
}
assert actual_author == expected_author
def test_author_empty_name():
data = {
'author': "",
'author_email': 'iam@groot.org',
}
actual_author = author(data)
expected_author = {
'fullname': b' <iam@groot.org>',
'name': b'',
'email': b'iam@groot.org',
}
assert actual_author == expected_author
def test_author_malformed():
data = {
'author': "['pierre', 'paul', 'jacques']",
'author_email': None,
}
actual_author = author(data)
expected_author = {
'fullname': b"['pierre', 'paul', 'jacques']",
'name': b"['pierre', 'paul', 'jacques']",
'email': None,
}
assert actual_author == expected_author
def test_author_malformed_2():
data = {
'author': '[marie, jeanne]',
'author_email': '[marie@some, jeanne@thing]',
}
actual_author = author(data)
expected_author = {
'fullname': b'[marie, jeanne] <[marie@some, jeanne@thing]>',
'name': b'[marie, jeanne]',
'email': b'[marie@some, jeanne@thing]',
}
assert actual_author == expected_author
def test_author_malformed_3():
data = {
'author': '[marie, jeanne, pierre]',
'author_email': '[marie@somewhere.org, jeanne@somewhere.org]',
}
actual_author = author(data)
expected_author = {
'fullname': b'[marie, jeanne, pierre] <[marie@somewhere.org, jeanne@somewhere.org]>', # noqa
'name': b'[marie, jeanne, pierre]',
'email': b'[marie@somewhere.org, jeanne@somewhere.org]',
}
assert actual_author == expected_author
# configuration error #
def test_badly_configured_loader_raise(monkeypatch):
"""Badly configured loader should raise"""
monkeypatch.delenv('SWH_CONFIG_FILENAME', raising=False)
with pytest.raises(ValueError) as e:
PyPILoader(url='some-url')
assert 'Misconfiguration' in e.value.args[0]
def test_pypi_api_url():
"""Compute pypi api url from the pypi project url should be ok"""
url = pypi_api_url('https://pypi.org/project/requests')
assert url == 'https://pypi.org/pypi/requests/json'
@pytest.mark.fs
def test_extract_intrinsic_metadata(tmp_path):
"""Parsing existing archive's PKG-INFO should yield results"""
uncompressed_archive_path = str(tmp_path)
archive_path = path.join(
DATADIR, 'files.pythonhosted.org', '0805nexter-1.1.0.zip')
uncompress(archive_path, dest=uncompressed_archive_path)
actual_metadata = extract_intrinsic_metadata(uncompressed_archive_path)
expected_metadata = {
'metadata_version': '1.0',
'name': '0805nexter',
'version': '1.1.0',
'summary': 'a simple printer of nested lest',
'home_page': 'http://www.hp.com',
'author': 'hgtkpython',
'author_email': '2868989685@qq.com',
'platforms': ['UNKNOWN'],
}
assert actual_metadata == expected_metadata
@pytest.mark.fs
def test_extract_intrinsic_metadata_failures(tmp_path):
"""Parsing inexistant path/archive/PKG-INFO yield None"""
# inexistant first level path
assert extract_intrinsic_metadata('/something-inexistant') == {}
# inexistant second level path (as expected by pypi archives)
assert extract_intrinsic_metadata(tmp_path) == {}
# inexistant PKG-INFO within second level path
existing_path_no_pkginfo = str(tmp_path / 'something')
os.mkdir(existing_path_no_pkginfo)
assert extract_intrinsic_metadata(tmp_path) == {}
# LOADER SCENARIO #
# "edge" cases (for the same origin) #
# no release artifact:
# {visit full, status: uneventful, no contents, etc...}
local_get_missing_all = local_get_factory(ignore_urls=[
'https://files.pythonhosted.org/packages/ec/65/c0116953c9a3f47de89e71964d6c7b0c783b01f29fa3390584dbf3046b4d/0805nexter-1.1.0.zip', # noqa
'https://files.pythonhosted.org/packages/c4/a0/4562cda161dc4ecbbe9e2a11eb365400c0461845c5be70d73869786809c4/0805nexter-1.2.0.zip', # noqa
])
def test_no_release_artifact(swh_config, local_get_missing_all):
"""Load a pypi project with all artifacts missing ends up with no snapshot
"""
url = 'https://pypi.org/project/0805nexter'
loader = PyPILoader(url)
actual_load_status = loader.load()
assert actual_load_status == {'status': 'uneventful'}
stats = loader.storage.stat_counters()
assert {
'content': 0,
'directory': 0,
'origin': 1,
'origin_visit': 1,
'person': 0,
'release': 0,
'revision': 0,
'skipped_content': 0,
'snapshot': 1,
} == stats
origin_visit = next(loader.storage.origin_visit_get(url))
assert origin_visit['status'] == 'partial'
# problem during loading:
# {visit: partial, status: uneventful, no snapshot}
def test_release_with_traceback(swh_config):
url = 'https://pypi.org/project/0805nexter'
with patch('swh.loader.package.pypi.PyPILoader.get_default_release',
side_effect=ValueError('Problem')):
loader = PyPILoader(url)
actual_load_status = loader.load()
assert actual_load_status == {'status': 'uneventful'}
stats = loader.storage.stat_counters()
assert {
'content': 0,
'directory': 0,
'origin': 1,
'origin_visit': 1,
'person': 0,
'release': 0,
'revision': 0,
'skipped_content': 0,
'snapshot': 0,
} == stats
origin_visit = next(loader.storage.origin_visit_get(url))
assert origin_visit['status'] == 'partial'
# problem during loading: failure early enough in between swh contents...
# some contents (contents, directories, etc...) have been written in storage
# {visit: partial, status: eventful, no snapshot}
# problem during loading: failure late enough we can have snapshots (some
# revisions are written in storage already)
# {visit: partial, status: eventful, snapshot}
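# A minimal sketch of the first failure scenario described just above,
# assuming a failure while storing contents can be simulated by patching
# the loader's storage.content_add; the expected outcome (partial visit,
# no snapshot) follows the comments above and is an assumption, not a
# verified behavior of the loader.
def test_release_failure_while_storing_contents(swh_config, local_get):
    url = 'https://pypi.org/project/0805nexter'
    loader = PyPILoader(url)
    with patch.object(loader.storage, 'content_add',
                      side_effect=ValueError('Problem')):
        loader.load()
    stats = loader.storage.stat_counters()
    # the failure happened before any snapshot could be written
    assert stats['snapshot'] == 0
    origin_visit = next(loader.storage.origin_visit_get(url))
    assert origin_visit['status'] == 'partial'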
# "normal" cases (for the same origin) #
local_get_missing_one = local_get_factory(ignore_urls=[
'https://files.pythonhosted.org/packages/ec/65/c0116953c9a3f47de89e71964d6c7b0c783b01f29fa3390584dbf3046b4d/0805nexter-1.1.0.zip', # noqa
])
# some missing release artifacts:
# {visit partial, status: eventful, 1 snapshot}
+def test_revision_metadata_structure(swh_config, local_get):
+ url = 'https://pypi.org/project/0805nexter'
+ loader = PyPILoader(url)
+
+ actual_load_status = loader.load()
+ assert actual_load_status == {'status': 'eventful'}
+
+ expected_revision_id = hash_to_bytes(
+ 'e445da4da22b31bfebb6ffc4383dbf839a074d21')
+ revision = list(loader.storage.revision_get([expected_revision_id]))[0]
+
+ assert revision is not None
+
+ assert isinstance(revision['metadata'], dict)
+ assert isinstance(revision['metadata']['intrinsic'], dict)
+ assert isinstance(revision['metadata']['extrinsic'], dict)
+ assert isinstance(revision['metadata']['original_artifact'], dict)
+
+ check_metadata_paths(revision['metadata'], paths=[
+ ('intrinsic.tool', str),
+ ('intrinsic.raw', dict),
+ ('extrinsic.provider', str),
+ ('extrinsic.when', str),
+ ('extrinsic.raw', dict),
+ ('original_artifact.filename', str),
+ ('original_artifact.length', int),
+ ('original_artifact.checksums', dict),
+ ])
+
+
def test_release_with_missing_artifact(swh_config, local_get_missing_one):
"""Load a pypi project with some missing artifacts ends up with 1 snapshot
"""
url = 'https://pypi.org/project/0805nexter'
loader = PyPILoader(url)
actual_load_status = loader.load()
assert actual_load_status == {'status': 'eventful'}
stats = loader.storage.stat_counters()
assert {
'content': 3,
'directory': 2,
'origin': 1,
'origin_visit': 1,
'person': 1,
'release': 0,
'revision': 1,
'skipped_content': 0,
'snapshot': 1
} == stats
expected_contents = map(hash_to_bytes, [
'405859113963cb7a797642b45f171d6360425d16',
'e5686aa568fdb1d19d7f1329267082fe40482d31',
'83ecf6ec1114fd260ca7a833a2d165e71258c338',
])
assert list(loader.storage.content_missing_per_sha1(expected_contents))\
== []
expected_dirs = map(hash_to_bytes, [
'b178b66bd22383d5f16f4f5c923d39ca798861b4',
'c3a58f8b57433a4b56caaa5033ae2e0931405338',
])
assert list(loader.storage.directory_missing(expected_dirs)) == []
# {revision hash: directory hash}
expected_revs = {
hash_to_bytes('e445da4da22b31bfebb6ffc4383dbf839a074d21'): hash_to_bytes('b178b66bd22383d5f16f4f5c923d39ca798861b4'), # noqa
}
assert list(loader.storage.revision_missing(expected_revs)) == []
expected_branches = {
'releases/1.2.0': {
'target': 'e445da4da22b31bfebb6ffc4383dbf839a074d21',
'target_type': 'revision',
},
'HEAD': {
'target': 'releases/1.2.0',
'target_type': 'alias',
},
}
expected_snapshot = {
'id': 'dd0e4201a232b1c104433741dbf45895b8ac9355',
'branches': expected_branches,
}
check_snapshot(expected_snapshot, storage=loader.storage)
origin_visit = next(loader.storage.origin_visit_get(url))
assert origin_visit['status'] == 'partial'
def test_release_artifact_no_prior_visit(swh_config, local_get):
"""With no prior visit, load a pypi project ends up with 1 snapshot
"""
url = 'https://pypi.org/project/0805nexter'
loader = PyPILoader(url)
actual_load_status = loader.load()
assert actual_load_status == {'status': 'eventful'}
stats = loader.storage.stat_counters()
assert {
'content': 6,
'directory': 4,
'origin': 1,
'origin_visit': 1,
'person': 1,
'release': 0,
'revision': 2,
'skipped_content': 0,
'snapshot': 1
} == stats
expected_contents = map(hash_to_bytes, [
'a61e24cdfdab3bb7817f6be85d37a3e666b34566',
'938c33483285fd8ad57f15497f538320df82aeb8',
'a27576d60e08c94a05006d2e6d540c0fdb5f38c8',
'405859113963cb7a797642b45f171d6360425d16',
'e5686aa568fdb1d19d7f1329267082fe40482d31',
'83ecf6ec1114fd260ca7a833a2d165e71258c338',
])
assert list(loader.storage.content_missing_per_sha1(expected_contents))\
== []
expected_dirs = map(hash_to_bytes, [
'05219ba38bc542d4345d5638af1ed56c7d43ca7d',
'cf019eb456cf6f78d8c4674596f1c9a97ece8f44',
'b178b66bd22383d5f16f4f5c923d39ca798861b4',
'c3a58f8b57433a4b56caaa5033ae2e0931405338',
])
assert list(loader.storage.directory_missing(expected_dirs)) == []
# {revision hash: directory hash}
expected_revs = {
hash_to_bytes('4c99891f93b81450385777235a37b5e966dd1571'): hash_to_bytes('05219ba38bc542d4345d5638af1ed56c7d43ca7d'), # noqa
hash_to_bytes('e445da4da22b31bfebb6ffc4383dbf839a074d21'): hash_to_bytes('b178b66bd22383d5f16f4f5c923d39ca798861b4'), # noqa
}
assert list(loader.storage.revision_missing(expected_revs)) == []
expected_branches = {
'releases/1.1.0': {
'target': '4c99891f93b81450385777235a37b5e966dd1571',
'target_type': 'revision',
},
'releases/1.2.0': {
'target': 'e445da4da22b31bfebb6ffc4383dbf839a074d21',
'target_type': 'revision',
},
'HEAD': {
'target': 'releases/1.2.0',
'target_type': 'alias',
},
}
expected_snapshot = {
'id': 'ba6e158ada75d0b3cfb209ffdf6daa4ed34a227a',
'branches': expected_branches,
}
check_snapshot(expected_snapshot, loader.storage)
origin_visit = next(loader.storage.origin_visit_get(url))
assert origin_visit['status'] == 'full'
# release artifact, new artifact
# {visit full, status full, new snapshot with shared history as prior snapshot}
def test_release_artifact_incremental_visit(swh_config, local_get_visits):
"""With prior visit, 2nd load will result with a different snapshot
with some shared history
"""
url = 'https://pypi.org/project/0805nexter'
loader = PyPILoader(url)
visit1_actual_load_status = loader.load()
visit1_stats = loader.storage.stat_counters()
assert visit1_actual_load_status == {'status': 'eventful'}
origin_visit1 = next(loader.storage.origin_visit_get(url))
assert origin_visit1['status'] == 'full'
assert {
'content': 6,
'directory': 4,
'origin': 1,
'origin_visit': 1,
'person': 1,
'release': 0,
'revision': 2,
'skipped_content': 0,
'snapshot': 1
} == visit1_stats
# Reset internal state
loader._info = None
visit2_actual_load_status = loader.load()
visit2_stats = loader.storage.stat_counters()
assert visit2_actual_load_status == {'status': 'eventful'}
visits = list(loader.storage.origin_visit_get(url))
assert len(visits) == 2
assert visits[1]['status'] == 'full'
assert {
'content': 6 + 1, # 1 more content
'directory': 4 + 2, # 2 more directories
'origin': 1,
'origin_visit': 1 + 1,
'person': 1,
'release': 0,
'revision': 2 + 1, # 1 more revision
'skipped_content': 0,
'snapshot': 1 + 1, # 1 more snapshot
} == visit2_stats
expected_contents = map(hash_to_bytes, [
'a61e24cdfdab3bb7817f6be85d37a3e666b34566',
'938c33483285fd8ad57f15497f538320df82aeb8',
'a27576d60e08c94a05006d2e6d540c0fdb5f38c8',
'405859113963cb7a797642b45f171d6360425d16',
'e5686aa568fdb1d19d7f1329267082fe40482d31',
'83ecf6ec1114fd260ca7a833a2d165e71258c338',
'92689fa2b7fb4d4fc6fb195bf73a50c87c030639'
])
assert list(loader.storage.content_missing_per_sha1(expected_contents))\
== []
expected_dirs = map(hash_to_bytes, [
'05219ba38bc542d4345d5638af1ed56c7d43ca7d',
'cf019eb456cf6f78d8c4674596f1c9a97ece8f44',
'b178b66bd22383d5f16f4f5c923d39ca798861b4',
'c3a58f8b57433a4b56caaa5033ae2e0931405338',
'e226e7e4ad03b4fc1403d69a18ebdd6f2edd2b3a',
'52604d46843b898f5a43208045d09fcf8731631b',
])
assert list(loader.storage.directory_missing(expected_dirs)) == []
# {revision hash: directory hash}
expected_revs = {
hash_to_bytes('4c99891f93b81450385777235a37b5e966dd1571'): hash_to_bytes('05219ba38bc542d4345d5638af1ed56c7d43ca7d'), # noqa
hash_to_bytes('e445da4da22b31bfebb6ffc4383dbf839a074d21'): hash_to_bytes('b178b66bd22383d5f16f4f5c923d39ca798861b4'), # noqa
hash_to_bytes('51247143b01445c9348afa9edfae31bf7c5d86b1'): hash_to_bytes('e226e7e4ad03b4fc1403d69a18ebdd6f2edd2b3a'), # noqa
}
assert list(loader.storage.revision_missing(expected_revs)) == []
expected_branches = {
'releases/1.1.0': {
'target': '4c99891f93b81450385777235a37b5e966dd1571',
'target_type': 'revision',
},
'releases/1.2.0': {
'target': 'e445da4da22b31bfebb6ffc4383dbf839a074d21',
'target_type': 'revision',
},
'releases/1.3.0': {
'target': '51247143b01445c9348afa9edfae31bf7c5d86b1',
'target_type': 'revision',
},
'HEAD': {
'target': 'releases/1.3.0',
'target_type': 'alias',
},
}
expected_snapshot = {
'id': '2e5149a7b0725d18231a37b342e9b7c4e121f283',
'branches': expected_branches,
}
check_snapshot(expected_snapshot, loader.storage)
origin_visit = list(loader.storage.origin_visit_get(url))[-1]
assert origin_visit['status'] == 'full'
urls = [
m.url for m in local_get_visits.request_history
if m.url.startswith('https://files.pythonhosted.org')
]
# visited each artifact once across 2 visits
assert len(urls) == len(set(urls))
# release artifact, no new artifact
# {visit full, status uneventful, same snapshot as before}
# release artifact, old artifact with different checksums
# {visit full, status full, new snapshot with shared history and some new
# different history}
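# A minimal sketch of the "no new artifact" scenario above, reusing only
# calls already exercised in this file; the expected status and snapshot
# count follow the comments above and are assumptions, not verified.
def test_release_artifact_no_new_artifact(swh_config, local_get):
    url = 'https://pypi.org/project/0805nexter'
    loader = PyPILoader(url)
    assert loader.load() == {'status': 'eventful'}
    # Reset internal state before the second visit, as done above
    loader._info = None
    # nothing changed upstream, so the second visit should be uneventful
    assert loader.load() == {'status': 'uneventful'}
    visits = list(loader.storage.origin_visit_get(url))
    assert len(visits) == 2
    assert visits[1]['status'] == 'full'
    # still a single snapshot: the second visit reuses the first one
    assert loader.storage.stat_counters()['snapshot'] == 1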