# Copyright (C) 2019-2021 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information
import datetime
import hashlib
from itertools import islice
import json
import logging
import os
import string
import sys
import tempfile
from typing import (
    Any,
    Dict,
    Generic,
    Iterable,
    Iterator,
    List,
    Mapping,
    Optional,
    Sequence,
    Set,
    Tuple,
    TypeVar,
)

import attr
import sentry_sdk

from swh.core.tarball import uncompress
from swh.loader.core.loader import BaseLoader
from swh.loader.exception import NotFound
from swh.loader.package.utils import download
from swh.model import from_disk
from swh.model.collections import ImmutableDict
from swh.model.hashutil import hash_to_hex
from swh.model.identifiers import (
    CoreSWHID,
    ExtendedObjectType,
    ExtendedSWHID,
    ObjectType,
)
from swh.model.model import (
    ExtID,
    MetadataAuthority,
    MetadataAuthorityType,
    MetadataFetcher,
    Origin,
    OriginVisit,
    OriginVisitStatus,
    RawExtrinsicMetadata,
    Revision,
    Sha1Git,
    Snapshot,
    TargetType,
)
from swh.storage.algos.snapshot import snapshot_get_latest
from swh.storage.interface import StorageInterface
from swh.storage.utils import now

logger = logging.getLogger(__name__)

SWH_METADATA_AUTHORITY = MetadataAuthority(
    type=MetadataAuthorityType.REGISTRY,
    url="https://softwareheritage.org/",
    metadata={},
)
"""Metadata authority for extrinsic metadata generated by Software Heritage.

Used for metadata on "original artifacts", ie. length, filename, and checksums
of downloaded archive files."""


PartialExtID = Tuple[str, bytes]
"""The ``extid_type`` and ``extid`` fields of an :class:`ExtID` object."""


@attr.s
class RawExtrinsicMetadataCore:
    """Contains the core of the metadata extracted by a loader, that will be
    used to build a full RawExtrinsicMetadata object by adding object identifier,
    context, and provenance information."""

    format = attr.ib(type=str)
    metadata = attr.ib(type=bytes)
    discovery_date = attr.ib(type=Optional[datetime.datetime], default=None)
    """Defaults to the visit date."""


@attr.s
class BasePackageInfo:
    """Information the loader needs about one version of a package: where to
    download its artifact(s) from, and how to deduplicate it (see :meth:`extid`).

    Package loaders subclass it to add format-specific fields."""

    url = attr.ib(type=str)
    filename = attr.ib(type=Optional[str])

    MANIFEST_FORMAT: Optional[string.Template] = None
    """If not None, used by the default extid() implementation to format a manifest,
    before hashing it to produce an ExtID."""

    EXTID_TYPE: str = "package-manifest-sha256"

    # The following attribute has kw_only=True in order to allow subclasses
    # to add attributes. Without kw_only, attributes without default values cannot
    # go after attributes with default values.
    # See <https://github.com/python-attrs/attrs/issues/38>
    directory_extrinsic_metadata = attr.ib(
        type=List[RawExtrinsicMetadataCore], default=[], kw_only=True,
    )
    """:term:`extrinsic metadata` collected by the loader, that will be attached to the
    loaded directory and added to the Metadata storage."""

    # TODO: add support for metadata for revisions and contents

    def extid(self) -> Optional[PartialExtID]:
        """Returns a unique intrinsic identifier of this package info,
        or None if this package info is not 'deduplicatable' (meaning that
        we will always load it, instead of checking the ExtID storage
        to see if we already did)"""
        if self.MANIFEST_FORMAT is None:
            return None
        else:
            manifest = self.MANIFEST_FORMAT.substitute(
                {k: str(v) for (k, v) in attr.asdict(self).items()}
            )
            return (self.EXTID_TYPE, hashlib.sha256(manifest.encode()).digest())
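

# Illustrative sketch (not part of the original module): a hypothetical
# PackageInfo subclass showing how MANIFEST_FORMAT drives the default extid()
# implementation above. The class name and the extra ``version`` field are
# invented for this example.
@attr.s
class _ExamplePackageInfo(BasePackageInfo):
    version = attr.ib(type=str)

    MANIFEST_FORMAT = string.Template("url $url version $version")

    # _ExamplePackageInfo(
    #     url="https://example.org/p-1.0.tar.gz", filename="p-1.0.tar.gz", version="1.0",
    # ).extid()
    # returns ("package-manifest-sha256", sha256 of the substituted manifest string).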


TPackageInfo = TypeVar("TPackageInfo", bound=BasePackageInfo)


class PackageLoader(BaseLoader, Generic[TPackageInfo]):
    # Origin visit type (str) set by the loader
    visit_type = ""

    visit_date: datetime.datetime

    def __init__(
        self,
        storage: StorageInterface,
        url: str,
        max_content_size: Optional[int] = None,
    ):
        """Loader's constructor. This raises an exception if the minimal required
        configuration is missing (cf. the :func:`check` method).

        Args:
            storage: Storage instance
            url: Origin url to load data from

        """
        super().__init__(storage=storage, max_content_size=max_content_size)
        self.url = url
        self.visit_date = datetime.datetime.now(tz=datetime.timezone.utc)

    def get_versions(self) -> Sequence[str]:
        """Return the list of all published package versions.

        Raises:
            :class:`swh.loader.exception.NotFound` error when failing to read the
            published package versions.

        Returns:
            Sequence of published versions

        """
        return []

    def get_package_info(self, version: str) -> Iterator[Tuple[str, TPackageInfo]]:
        """Given a release version of a package, retrieve the associated
        package information for such version.

        Args:
            version: Package version

        Returns:
            (branch name, package metadata)

        """
        yield from {}

    def build_revision(
        self, p_info: TPackageInfo, uncompressed_path: str, directory: Sha1Git
    ) -> Optional[Revision]:
        """Build the revision from the archive metadata (extrinsic
        artifact metadata) and the intrinsic metadata.

        Args:
            p_info: Package information
            uncompressed_path: Artifact uncompressed path on disk

        Returns:
            Revision object

        """
        raise NotImplementedError("build_revision")

    def get_default_version(self) -> str:
        """Retrieve the latest release version if any.

        Returns:
            Latest version

        """
        return ""

    def last_snapshot(self) -> Optional[Snapshot]:
        """Retrieve the last snapshot out of the last visit."""
        return snapshot_get_latest(self.storage, self.url)

    def known_artifacts(
        self, snapshot: Optional[Snapshot]
    ) -> Dict[Sha1Git, Optional[ImmutableDict[str, object]]]:
        """Retrieve the known releases/artifacts for the origin.

        Args:
            snapshot: snapshot for the visit

        Returns:
            Dict mapping revision ids (bytes) to their metadata dict.

        """
        if not snapshot:
            return {}

        # retrieve only revisions (e.g. we do not want branch aliases here)
        revs = [
            rev.target
            for rev in snapshot.branches.values()
            if rev and rev.target_type == TargetType.REVISION
        ]
        known_revisions = self.storage.revision_get(revs)
        return {
            revision.id: revision.metadata for revision in known_revisions if revision
        }

    def new_packageinfo_to_extid(self, p_info: TPackageInfo) -> Optional[PartialExtID]:
        return p_info.extid()

    def _get_known_extids(
        self, packages_info: List[TPackageInfo]
    ) -> Dict[PartialExtID, List[CoreSWHID]]:
        """Compute the ExtIDs of the new PackageInfo objects, search which ones are
        already loaded in the archive, and return them if any."""

        # Compute the ExtIDs of all the new packages, grouped by extid type
        new_extids: Dict[str, List[bytes]] = {}
        for p_info in packages_info:
            res = p_info.extid()
            if res is not None:
                (extid_type, extid_extid) = res
                new_extids.setdefault(extid_type, []).append(extid_extid)

        # For each extid type, call extid_get_from_extid() with all the extids of
        # that type, and store them in the '(type, extid) -> target' map.
        known_extids: Dict[PartialExtID, List[CoreSWHID]] = {}
        for (extid_type, extids) in new_extids.items():
            for extid in self.storage.extid_get_from_extid(extid_type, extids):
                if extid is not None:
                    key = (extid.extid_type, extid.extid)
                    known_extids.setdefault(key, []).append(extid.target)

        return known_extids

    def resolve_revision_from_extids(
        self,
        known_extids: Dict[PartialExtID, List[CoreSWHID]],
        p_info: TPackageInfo,
        revision_whitelist: Set[Sha1Git],
    ) -> Optional[Sha1Git]:
        """Resolve the revision from known ExtIDs and a package info object.

        If the artifact has already been downloaded, this will return the
        existing revision targeting that uncompressed artifact directory.
        Otherwise, this returns None.

        Args:
            known_extids: Dict built from a list of ExtID, with the target as value
            p_info: Package information
            revision_whitelist: Any ExtID with a target not in this set is filtered out

        Returns:
            None or revision identifier

        """
        new_extid = p_info.extid()
        if new_extid is None:
            return None

        for extid_target in known_extids.get(new_extid, []):
            if extid_target.object_id not in revision_whitelist:
                # There is a known ExtID for this package, but its target is not
                # in the snapshot.
                # This can happen for three reasons:
                #
                # 1. a loader crashed after writing the ExtID, but before writing
                #    the snapshot
                # 2. some other loader loaded the same artifact, but produced
                #    a different revision, causing an additional ExtID object
                #    to be written. We will probably find this loader's ExtID
                #    in a future iteration of this loop.
                #    Note that for now, this is impossible, as each loader has a
                #    completely different extid_type, but this is an implementation
                #    detail of each loader.
                # 3. we took a snapshot, then the package disappeared,
                #    then we took another snapshot, and the package reappeared
                #
                # In case of 1, we must actually load the package now,
                # so let's do it.
                # TODO: detect when we are in case 3 using revision_missing instead
                # of the snapshot.
                continue
            elif extid_target.object_type != ObjectType.REVISION:
                # We only support revisions for now.
                # Note that this case should never be reached unless there is a
                # collision between a revision hash and some non-revision object's
                # hash, but better safe than sorry.
                logger.warning(
                    "%s is in the revision whitelist, but is not a revision.",
                    hash_to_hex(extid_target.object_id),
                )
                continue
            return extid_target.object_id

        return None

    def download_package(
        self, p_info: TPackageInfo, tmpdir: str
    ) -> List[Tuple[str, Mapping]]:
        """Download artifacts for a specific package. All downloads happen in
        the tmpdir folder.

        The default implementation expects the package info to describe one
        artifact per package.

        Note that most implementations have one artifact per package, but some
        have multiple artifacts per package (debian), and some have none, as
        the package is the artifact (gnu).

        Args:
            p_info: Information on the package artifacts to download
                (url, filename, etc...)
            tmpdir: Location to retrieve such artifacts

        Returns:
            List of (path, computed hashes)

        """
        return [download(p_info.url, dest=tmpdir, filename=p_info.filename)]
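
    # Illustrative sketch (not part of the original module): a loader with
    # several artifacts per package (e.g. Debian-style source packages) could
    # hypothetically override download_package along these lines, assuming its
    # PackageInfo subclass exposes an ``artifact_urls`` list of (url, filename)
    # pairs (that attribute name is invented for this example):
    #
    #     def download_package(self, p_info, tmpdir):
    #         return [
    #             download(url, dest=tmpdir, filename=filename)
    #             for (url, filename) in p_info.artifact_urls
    #         ]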

    def uncompress(
        self, dl_artifacts: List[Tuple[str, Mapping[str, Any]]], dest: str
    ) -> str:
        """Uncompress the artifact(s) in the destination folder dest.

        Optionally, this could need to use the p_info dict for some more
        information (debian).

        """
        uncompressed_path = os.path.join(dest, "src")
        for a_path, _ in dl_artifacts:
            uncompress(a_path, dest=uncompressed_path)
        return uncompressed_path

    def extra_branches(self) -> Dict[bytes, Mapping[str, Any]]:
        """Return an extra dict of branches that are used to update the set of
        branches.

        """
        return {}
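
    # Illustrative sketch (not part of the original module): a subclass could
    # hypothetically pin an extra alias branch like this (the branch names are
    # invented for this example):
    #
    #     def extra_branches(self) -> Dict[bytes, Mapping[str, Any]]:
    #         return {
    #             b"releases/stable": {
    #                 "target_type": "alias",
    #                 "target": b"releases/1.0",
    #             }
    #         }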

    def finalize_visit(
        self,
        *,
        snapshot: Optional[Snapshot],
        visit: OriginVisit,
        status_visit: str,
        status_load: str,
        failed_branches: List[str],
    ) -> Dict[str, Any]:
        """Finalize the visit:

        - flush eventual unflushed data to storage
        - update origin visit's status
        - return the task's status

        """
        self.storage.flush()

        snapshot_id: Optional[bytes] = None
        if snapshot and snapshot.id:  # prevent storing an empty snapshot id (b"")
            snapshot_id = snapshot.id
        assert visit.visit
        visit_status = OriginVisitStatus(
            origin=self.url,
            visit=visit.visit,
            type=self.visit_type,
            date=now(),
            status=status_visit,
            snapshot=snapshot_id,
        )
        self.storage.origin_visit_status_add([visit_status])
        result: Dict[str, Any] = {
            "status": status_load,
        }
        if snapshot_id:
            result["snapshot_id"] = hash_to_hex(snapshot_id)
        if failed_branches:
            logger.warning("%d failed branches", len(failed_branches))
            for i, urls in enumerate(islice(failed_branches, 50)):
                prefix_url = "Failed branches: " if i == 0 else ""
                logger.warning("%s%s", prefix_url, urls)

        return result

    def load(self) -> Dict:
        """Load for a specific origin the associated contents.

        1. Get the list of versions in an origin.

        2. Get the snapshot from the previous run of the loader,
           and filter out versions that were already loaded, if their
           :term:`extids <extid>` match.

        Then, for each remaining version in the origin:

        3. Fetch the files for one package version. By default, this can be
           implemented as a simple HTTP request. Loaders with more specific
           requirements can override this, e.g.: the PyPI loader checks the
           integrity of the downloaded files; the Debian loader has to download
           and check several files for one package version.

        4. Extract the downloaded files. By default, this would be a universal
           archive/tarball extraction.

           Loaders for specific formats can override this method (for instance,
           the Debian loader uses dpkg-source -x).

        5. Convert the extracted directory to a set of Software Heritage
           objects, using swh.model.from_disk.

        6. Extract the metadata from the unpacked directories. This would only
           be applicable for "smart" loaders like npm (parsing the
           package.json), PyPI (parsing the PKG-INFO file) or Debian (parsing
           debian/changelog and debian/control).

           On "minimal-metadata" sources such as the GNU archive, the lister
           should provide the minimal set of metadata needed to populate the
           revision/release objects (authors, dates) as an argument to the
           task.

        7. Generate the revision/release objects for the given version, from
           the data generated at steps 3 and 4.

        end for each

        8. Generate and load the snapshot for the visit.

           Using the revisions/releases collected at step 7, and the branch
           information from step 2, generate a snapshot and load it into the
           Software Heritage archive.

        """
        status_load = "uneventful"  # either: eventful, uneventful, failed
        status_visit = "full"  # see swh.model.model.OriginVisitStatus
        snapshot = None
        failed_branches: List[str] = []

        # Prepare origin and origin_visit
        origin = Origin(url=self.url)
        try:
            self.storage.origin_add([origin])
            visit = list(
                self.storage.origin_visit_add(
                    [
                        OriginVisit(
                            origin=self.url,
                            date=self.visit_date,
                            type=self.visit_type,
                        )
                    ]
                )
            )[0]
        except Exception as e:
            logger.exception("Failed to initialize origin_visit for %s", self.url)
            sentry_sdk.capture_exception(e)
            return {"status": "failed"}

        # Get the previous snapshot for this origin. It is then used to see which
        # of the package's versions are already loaded in the archive.
        try:
            last_snapshot = self.last_snapshot()
            logger.debug("last snapshot: %s", last_snapshot)
            known_artifacts = self.known_artifacts(last_snapshot)
            logger.debug("known artifacts: %s", known_artifacts)
        except Exception as e:
            logger.exception("Failed to get previous state for %s", self.url)
            sentry_sdk.capture_exception(e)
            return self.finalize_visit(
                snapshot=snapshot,
                visit=visit,
                failed_branches=failed_branches,
                status_visit="failed",
                status_load="failed",
            )

        load_exceptions: List[Exception] = []

        # Get the list of all version names
        try:
            versions = self.get_versions()
        except NotFound:
            return self.finalize_visit(
                snapshot=snapshot,
                visit=visit,
                failed_branches=failed_branches,
                status_visit="not_found",
                status_load="failed",
            )
        except Exception:
            return self.finalize_visit(
                snapshot=snapshot,
                visit=visit,
                failed_branches=failed_branches,
                status_visit="failed",
                status_load="failed",
            )

        # Get the metadata of each version's package
        packages_info: List[Tuple[str, str, TPackageInfo]] = [
            (version, branch_name, p_info)
            for version in versions
            for (branch_name, p_info) in self.get_package_info(version)
        ]

        # Compute the ExtID of each of these packages
        known_extids = self._get_known_extids(
            [p_info for (_, _, p_info) in packages_info]
        )

        if last_snapshot is None:
            last_snapshot_targets: Set[Sha1Git] = set()
        else:
            last_snapshot_targets = {
                branch.target for branch in last_snapshot.branches.values()
            }

        new_extids: Set[ExtID] = set()

        tmp_revisions: Dict[str, List[Tuple[str, Sha1Git]]] = {
            version: [] for version in versions
        }

        for (version, branch_name, p_info) in packages_info:
            logger.debug("package_info: %s", p_info)

            # Check if the package was already loaded, using its ExtID
            revision_id = self.resolve_revision_from_extids(
                known_extids, p_info, last_snapshot_targets
            )

            if revision_id is None:
                # No matching revision found in the last snapshot, load it.
                try:
                    res = self._load_revision(p_info, origin)
                    if res:
                        (revision_id, directory_id) = res
                        assert revision_id
                        assert directory_id
                        self._load_extrinsic_directory_metadata(
                            p_info, revision_id, directory_id
                        )
                    self.storage.flush()
                    status_load = "eventful"
                except Exception as e:
                    self.storage.clear_buffers()
                    load_exceptions.append(e)
                    sentry_sdk.capture_exception(e)
                    logger.exception(
                        "Failed loading branch %s for %s", branch_name, self.url
                    )
                    failed_branches.append(branch_name)
                    continue

                if revision_id is None:
                    continue

            partial_extid = p_info.extid()
            if partial_extid is not None:
                (extid_type, extid) = partial_extid
                revision_swhid = CoreSWHID(
                    object_type=ObjectType.REVISION, object_id=revision_id
                )
                new_extids.add(
                    ExtID(extid_type=extid_type, extid=extid, target=revision_swhid)
                )

            tmp_revisions[version].append((branch_name, revision_id))

        if load_exceptions:
            status_visit = "partial"

        if not tmp_revisions:
            # We could not load any revisions; fail completely
            return self.finalize_visit(
                snapshot=snapshot,
                visit=visit,
                failed_branches=failed_branches,
                status_visit="failed",
                status_load="failed",
            )

        try:
            # Retrieve the default release version (the "latest" one)
            default_version = self.get_default_version()
            logger.debug("default version: %s", default_version)
            # Retrieve extra branches
            extra_branches = self.extra_branches()
            logger.debug("extra branches: %s", extra_branches)

            snapshot = self._load_snapshot(
                default_version, tmp_revisions, extra_branches
            )
            self.storage.flush()
        except Exception as e:
            logger.exception("Failed to build snapshot for origin %s", self.url)
            sentry_sdk.capture_exception(e)
            status_visit = "failed"
            status_load = "failed"

        if snapshot:
            try:
                metadata_objects = self.build_extrinsic_snapshot_metadata(snapshot.id)
                self._load_metadata_objects(metadata_objects)
            except Exception as e:
                logger.exception(
                    "Failed to load extrinsic snapshot metadata for %s", self.url
                )
                sentry_sdk.capture_exception(e)
                status_visit = "partial"
                status_load = "failed"

        try:
            metadata_objects = self.build_extrinsic_origin_metadata()
            self._load_metadata_objects(metadata_objects)
        except Exception as e:
            logger.exception(
                "Failed to load extrinsic origin metadata for %s", self.url
            )
            sentry_sdk.capture_exception(e)
            status_visit = "partial"
            status_load = "failed"

        self._load_extids(new_extids)

        return self.finalize_visit(
            snapshot=snapshot,
            visit=visit,
            failed_branches=failed_branches,
            status_visit=status_visit,
            status_load=status_load,
        )

    def _load_directory(
        self, dl_artifacts: List[Tuple[str, Mapping[str, Any]]], tmpdir: str
    ) -> Tuple[str, from_disk.Directory]:
        uncompressed_path = self.uncompress(dl_artifacts, dest=tmpdir)
        logger.debug("uncompressed_path: %s", uncompressed_path)

        directory = from_disk.Directory.from_disk(
            path=uncompressed_path.encode("utf-8"),
            max_content_length=self.max_content_size,
        )

        contents, skipped_contents, directories = from_disk.iter_directory(directory)

        logger.debug("Number of skipped contents: %s", len(skipped_contents))
        self.storage.skipped_content_add(skipped_contents)
        logger.debug("Number of contents: %s", len(contents))
        self.storage.content_add(contents)

        logger.debug("Number of directories: %s", len(directories))
        self.storage.directory_add(directories)

        return (uncompressed_path, directory)

    def _load_revision(
        self, p_info: TPackageInfo, origin
    ) -> Optional[Tuple[Sha1Git, Sha1Git]]:
        """Does all the loading of a revision itself:

        * downloads a package and uncompresses it
        * loads it from disk
        * adds contents, directories, and revision to self.storage
        * returns (revision_id, directory_id)

        Raises
            exception when unable to download or uncompress artifacts

        """
        with tempfile.TemporaryDirectory() as tmpdir:
            dl_artifacts = self.download_package(p_info, tmpdir)

            (uncompressed_path, directory) = self._load_directory(dl_artifacts, tmpdir)

            # FIXME: This should be release. cf. D409
            revision = self.build_revision(
                p_info, uncompressed_path, directory=directory.hash
            )
            if not revision:
                # Some artifacts are missing intrinsic metadata
                # skipping those
                return None

        metadata = [metadata for (filepath, metadata) in dl_artifacts]

        original_artifact_metadata = RawExtrinsicMetadata(
            target=ExtendedSWHID(
                object_type=ExtendedObjectType.DIRECTORY, object_id=revision.directory
            ),
            discovery_date=self.visit_date,
            authority=SWH_METADATA_AUTHORITY,
            fetcher=self.get_metadata_fetcher(),
            format="original-artifacts-json",
            metadata=json.dumps(metadata).encode(),
            origin=self.url,
            revision=CoreSWHID(object_type=ObjectType.REVISION, object_id=revision.id),
        )
        self._load_metadata_objects([original_artifact_metadata])

        logger.debug("Revision: %s", revision)

        self.storage.revision_add([revision])
        assert directory.hash
        return (revision.id, directory.hash)

    def _load_snapshot(
        self,
        default_version: str,
        revisions: Dict[str, List[Tuple[str, bytes]]],
        extra_branches: Dict[bytes, Mapping[str, Any]],
    ) -> Optional[Snapshot]:
        """Build snapshot out of the current revisions stored and extra branches.
        Then load it in the storage.

        """
        logger.debug("revisions: %s", revisions)
        # Build and load the snapshot
        branches = {}  # type: Dict[bytes, Mapping[str, Any]]
        for version, branch_name_revisions in revisions.items():
            if version == default_version and len(branch_name_revisions) == 1:
                # only 1 branch (no ambiguity), we can create an alias
                # branch 'HEAD'
                branch_name, _ = branch_name_revisions[0]
                # except for some corner case (deposit)
                if branch_name != "HEAD":
                    branches[b"HEAD"] = {
                        "target_type": "alias",
                        "target": branch_name.encode("utf-8"),
                    }

            for branch_name, target in branch_name_revisions:
                branches[branch_name.encode("utf-8")] = {
                    "target_type": "revision",
                    "target": target,
                }

        # Deal with extra-branches
        for name, branch_target in extra_branches.items():
            if name in branches:
                logger.error("Extra branch '%s' has been ignored", name)
            else:
                branches[name] = branch_target

        snapshot_data = {"branches": branches}
        logger.debug("snapshot: %s", snapshot_data)
        snapshot = Snapshot.from_dict(snapshot_data)
        logger.debug("snapshot: %s", snapshot)
        self.storage.snapshot_add([snapshot])

        return snapshot

    def get_loader_name(self) -> str:
        """Returns a fully qualified name of this loader."""
        return f"{self.__class__.__module__}.{self.__class__.__name__}"

    def get_loader_version(self) -> str:
        """Returns the version of the current loader."""
        module_name = self.__class__.__module__ or ""
        module_name_parts = module_name.split(".")

        # Iterate rootward through the package hierarchy until we find a parent of
        # this loader's module with a __version__ attribute.
        for prefix_size in range(len(module_name_parts), 0, -1):
            package_name = ".".join(module_name_parts[0:prefix_size])
            module = sys.modules[package_name]
            if hasattr(module, "__version__"):
                return module.__version__  # type: ignore

        # If this loader's class has no parent package with a __version__,
        # it should implement it itself.
        raise NotImplementedError(
            f"Could not dynamically find the version of {self.get_loader_name()}."
        )

    def get_metadata_fetcher(self) -> MetadataFetcher:
        """Returns a MetadataFetcher instance representing this package loader,
        which is used for adding provenance information to extracted
        extrinsic metadata, if any."""
        return MetadataFetcher(
            name=self.get_loader_name(),
            version=self.get_loader_version(),
            metadata={},
        )

    def get_metadata_authority(self) -> MetadataAuthority:
        """For package loaders that get extrinsic metadata, returns the authority
        the metadata are coming from.
        """
        raise NotImplementedError("get_metadata_authority")

    def get_extrinsic_origin_metadata(self) -> List[RawExtrinsicMetadataCore]:
        """Returns metadata items, used by build_extrinsic_origin_metadata."""
        return []

    def build_extrinsic_origin_metadata(self) -> List[RawExtrinsicMetadata]:
        """Builds a list of full RawExtrinsicMetadata objects, using
        metadata returned by get_extrinsic_origin_metadata."""
        metadata_items = self.get_extrinsic_origin_metadata()
        if not metadata_items:
            # If this package loader doesn't write metadata, no need to require
            # an implementation for get_metadata_authority.
            return []

        authority = self.get_metadata_authority()
        fetcher = self.get_metadata_fetcher()

        metadata_objects = []
        for item in metadata_items:
            metadata_objects.append(
                RawExtrinsicMetadata(
                    target=Origin(self.url).swhid(),
                    discovery_date=item.discovery_date or self.visit_date,
                    authority=authority,
                    fetcher=fetcher,
                    format=item.format,
                    metadata=item.metadata,
                )
            )

        return metadata_objects

    def get_extrinsic_snapshot_metadata(self) -> List[RawExtrinsicMetadataCore]:
        """Returns metadata items, used by build_extrinsic_snapshot_metadata."""
        return []

    def build_extrinsic_snapshot_metadata(
        self, snapshot_id: Sha1Git
    ) -> List[RawExtrinsicMetadata]:
        """Builds a list of full RawExtrinsicMetadata objects, using
        metadata returned by get_extrinsic_snapshot_metadata."""
        metadata_items = self.get_extrinsic_snapshot_metadata()
        if not metadata_items:
            # If this package loader doesn't write metadata, no need to require
            # an implementation for get_metadata_authority.
            return []

        authority = self.get_metadata_authority()
        fetcher = self.get_metadata_fetcher()

        metadata_objects = []
        for item in metadata_items:
            metadata_objects.append(
                RawExtrinsicMetadata(
                    target=ExtendedSWHID(
                        object_type=ExtendedObjectType.SNAPSHOT, object_id=snapshot_id
                    ),
                    discovery_date=item.discovery_date or self.visit_date,
                    authority=authority,
                    fetcher=fetcher,
                    format=item.format,
                    metadata=item.metadata,
                    origin=self.url,
                )
            )

        return metadata_objects

    def build_extrinsic_directory_metadata(
        self,
        p_info: TPackageInfo,
        revision_id: Sha1Git,
        directory_id: Sha1Git,
    ) -> List[RawExtrinsicMetadata]:
        if not p_info.directory_extrinsic_metadata:
            # If this package loader doesn't write metadata, no need to require
            # an implementation for get_metadata_authority.
            return []

        authority = self.get_metadata_authority()
        fetcher = self.get_metadata_fetcher()

        metadata_objects = []
        for item in p_info.directory_extrinsic_metadata:
            metadata_objects.append(
                RawExtrinsicMetadata(
                    target=ExtendedSWHID(
                        object_type=ExtendedObjectType.DIRECTORY,
                        object_id=directory_id,
                    ),
                    discovery_date=item.discovery_date or self.visit_date,
                    authority=authority,
                    fetcher=fetcher,
                    format=item.format,
                    metadata=item.metadata,
                    origin=self.url,
                    revision=CoreSWHID(
                        object_type=ObjectType.REVISION, object_id=revision_id
                    ),
                )
            )

        return metadata_objects

    def _load_extrinsic_directory_metadata(
        self,
        p_info: TPackageInfo,
        revision_id: Sha1Git,
        directory_id: Sha1Git,
    ) -> None:
        metadata_objects = self.build_extrinsic_directory_metadata(
            p_info, revision_id, directory_id
        )
        self._load_metadata_objects(metadata_objects)

    def _load_metadata_objects(
        self, metadata_objects: List[RawExtrinsicMetadata]
    ) -> None:
        if not metadata_objects:
            # If this package loader doesn't write metadata, no need to require
            # an implementation for get_metadata_authority.
            return

        self._create_authorities(mo.authority for mo in metadata_objects)
        self._create_fetchers(mo.fetcher for mo in metadata_objects)

        self.storage.raw_extrinsic_metadata_add(metadata_objects)

    def _create_authorities(self, authorities: Iterable[MetadataAuthority]) -> None:
        deduplicated_authorities = {
            (authority.type, authority.url): authority for authority in authorities
        }
        if authorities:
            self.storage.metadata_authority_add(
                list(deduplicated_authorities.values())
            )

    def _create_fetchers(self, fetchers: Iterable[MetadataFetcher]) -> None:
        deduplicated_fetchers = {
            (fetcher.name, fetcher.version): fetcher for fetcher in fetchers
        }
        if fetchers:
            self.storage.metadata_fetcher_add(list(deduplicated_fetchers.values()))

    def _load_extids(self, extids: Set[ExtID]) -> None:
        if not extids:
            return
        try:
            self.storage.extid_add(list(extids))
        except Exception as e:
            logger.exception("Failed to load new ExtIDs for %s", self.url)
            sentry_sdk.capture_exception(e)
            # No big deal, it just means the next visit will load the same versions
            # again.
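

# Illustrative sketch (not part of the original module): a minimal, hypothetical
# PackageLoader subclass showing the methods a concrete loader typically
# overrides (get_versions, get_default_version, get_package_info,
# build_revision). The class name, visit type and URLs are invented for this
# example; real loaders live in their own swh.loader.package.* submodules.
class _ExamplePackageLoader(PackageLoader[BasePackageInfo]):
    visit_type = "example"

    def get_versions(self) -> Sequence[str]:
        # A real loader would query its upstream package index here.
        return ["1.0"]

    def get_default_version(self) -> str:
        return "1.0"

    def get_package_info(
        self, version: str
    ) -> Iterator[Tuple[str, BasePackageInfo]]:
        # One branch per version, pointing at a (hypothetical) tarball URL.
        yield (
            f"releases/{version}",
            BasePackageInfo(
                url=f"https://example.org/pkg-{version}.tar.gz",
                filename=f"pkg-{version}.tar.gz",
            ),
        )

    def build_revision(
        self, p_info: BasePackageInfo, uncompressed_path: str, directory: Sha1Git
    ) -> Optional[Revision]:
        # A real loader builds a Revision from the artifact's intrinsic and
        # extrinsic metadata; returning None makes the loader skip the artifact.
        return None


# Hypothetical usage (with a configured StorageInterface instance):
#
#     loader = _ExamplePackageLoader(storage, "https://example.org/pkg")
#     result = loader.load()  # e.g. {"status": "eventful", "snapshot_id": "..."}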