diff --git a/docs/metadata.md b/docs/metadata.md
index 6f560428..c3cb6073 100644
--- a/docs/metadata.md
+++ b/docs/metadata.md
@@ -1,166 +1,166 @@
# Deposit metadata

When making a software deposit into the SWH archive, one can add information
describing the software artifact and the software project. The metadata will be
translated to the [CodeMeta v.2](https://doi.org/10.5063/SCHEMA/CODEMETA-2.0)
vocabulary if possible.

## Metadata requirements

MUST

-- **the schema/vocabulary** used *MUST* be specified with a persistant url
+- **the schema/vocabulary** used *MUST* be specified with a persistent url
  (DublinCore, DOAP, CodeMeta, etc.)

  ```XML
  or
  or
  ```

- **the url** representing the location of the source *MUST* be provided under
  the url tag. The url will be used for creating an origin object in the
  archive.

  ```XML
  www.url-example.com
  or
  www.url-example.com
  or
  www.url-example.com
  ```

- **the external_identifier** *MUST* be provided as an identifier
- **the name** of the software deposit *MUST* be provided
  [atom:title, codemeta:name, dcterms:title]
- **the authors** of the software deposit *MUST* be provided

SHOULD

- **the external_identifier** *SHOULD* match the Slug external-identifier in
  the header
- **the description** of the software deposit *SHOULD* be provided
  [codemeta:description] - short or long description of the software
- **the license/s** of the software deposit *SHOULD* be provided
  [codemeta:license]

MAY

- other metadata *MAY* be added with terms defined by the schema in use.

## Examples

### Using only Atom

```XML
Awesome Compiler
urn:uuid:1225c695-cfb8-4ebb-aaaa-80da344efa6a
1785io25c695
2017-10-07T15:17:08Z
some awesome author
```

### Using Atom with CodeMeta

```XML
Awesome Compiler
urn:uuid:1225c695-cfb8-4ebb-aaaa-80da344efa6a
1785io25c695
1785io25c695
origin url
other identifier, DOI, ARK
Domain
description
key-word 1
key-word 2
creation date
publication date
comment
article name
article id
Collaboration/Project
project name
id
see also
Sponsor A
Sponsor B
Platform/OS
dependencies
Version
active
license url
spdx
.Net Framework 3.0
Python2.3
author1
Inria
UPMC
author2
Inria
UPMC
http://code.com
language 1
language 2
http://issuetracker.com
```

### Using Atom with DublinCore and CodeMeta (multi-schema entry)

```XML
Awesome Compiler
hal
urn:uuid:1225c695-cfb8-4ebb-aaaa-80da344efa6a
%s
hal-01587361
doi:10.5281/zenodo.438684
The assignment problem
AffectationRO
author
[INFO] Computer Science [cs]
[INFO.INFO-RO] Computer Science [cs]/Operations Research [cs.RO]
SOFTWARE
Project in OR: The assignment problem
A java implementation for the assignment problem
first release description
fr
2015-06-01
2017-10-19
en
origin url
1.0.0
key word
Comment
Référence interne
link
Sponsor
Platform/OS
dependencies
Ended
license url
spdx
http://code.com
language 1
language 2
```

diff --git a/swh/deposit/api/private/deposit_read.py b/swh/deposit/api/private/deposit_read.py
index e5636071..cfde3fbe 100644
--- a/swh/deposit/api/private/deposit_read.py
+++ b/swh/deposit/api/private/deposit_read.py
@@ -1,234 +1,235 @@
# Copyright (C) 2017 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information

import json
import os
import shutil
import tempfile

from contextlib import contextmanager
from django.http import FileResponse
from rest_framework import status

from swh.loader.tar import tarball
from swh.model import hashutil, identifiers

from
..common import SWHGetDepositAPI, SWHPrivateAPIView from ...models import Deposit, DepositRequest from ...models import previous_revision_id @contextmanager def aggregate_tarballs(extraction_dir, archive_paths): """Aggregate multiple tarballs into one and returns this new archive's path. Args: extraction_dir (path): Path to use for the tarballs computation archive_paths ([str]): Deposit's archive paths Returns: Tuple (directory to clean up, archive path (aggregated or not)) """ if len(archive_paths) > 1: # need to rebuild one archive # from multiple ones os.makedirs(extraction_dir, 0o755, exist_ok=True) dir_path = tempfile.mkdtemp(prefix='swh.deposit-', dir=extraction_dir) # root folder to build an aggregated tarball aggregated_tarball_rootdir = os.path.join(dir_path, 'aggregate') os.makedirs(aggregated_tarball_rootdir, 0o755, exist_ok=True) # uncompress in a temporary location all archives for archive_path in archive_paths: tarball.uncompress(archive_path, aggregated_tarball_rootdir) # Aggregate into one big tarball the multiple smaller ones temp_tarpath = tarball.compress( aggregated_tarball_rootdir + '.zip', nature='zip', dirpath_or_files=aggregated_tarball_rootdir) # can already clean up temporary directory shutil.rmtree(aggregated_tarball_rootdir) try: yield temp_tarpath finally: shutil.rmtree(dir_path) else: # only 1 archive, no need to do fancy actions (and no cleanup step) yield archive_paths[0] class SWHDepositReadArchives(SWHGetDepositAPI, SWHPrivateAPIView): """Dedicated class to read a deposit's raw archives content. Only GET is supported. """ ADDITIONAL_CONFIG = { 'extraction_dir': ('str', '/tmp/swh-deposit/archive/'), } def __init__(self): super().__init__() self.extraction_dir = self.config['extraction_dir'] if not os.path.exists(self.extraction_dir): os.makedirs(self.extraction_dir) def retrieve_archives(self, deposit_id): """Given a deposit identifier, returns its associated archives' path. Yields: path to deposited archives """ deposit = Deposit.objects.get(pk=deposit_id) deposit_requests = DepositRequest.objects.filter( deposit=deposit, type=self.deposit_request_types['archive']).order_by('id') for deposit_request in deposit_requests: yield deposit_request.archive.path def process_get(self, req, collection_name, deposit_id): """Build a unique tarball from the multiple received and stream that content to the client. Args: req (Request): collection_name (str): Collection owning the deposit deposit_id (id): Deposit concerned by the reading Returns: Tuple status, stream of content, content-type """ archive_paths = list(self.retrieve_archives(deposit_id)) with aggregate_tarballs(self.extraction_dir, archive_paths) as path: return FileResponse(open(path, 'rb'), status=status.HTTP_200_OK, content_type='application/octet-stream') class SWHDepositReadMetadata(SWHGetDepositAPI, SWHPrivateAPIView): """Class in charge of aggregating metadata on a deposit. """ ADDITIONAL_CONFIG = { 'provider': ('dict', { 'provider_name': '', 'provider_type': 'deposit_client', 'provider_url': '', 'metadata': { } }), 'tool': ('dict', { 'tool_name': 'swh-deposit', 'tool_version': '0.0.1', 'tool_configuration': { 'sword_version': '2' } }) } def __init__(self): super().__init__() self.provider = self.config['provider'] self.tool = self.config['tool'] def _aggregate_metadata(self, deposit, metadata_requests): """Retrieve and aggregates metadata information. 
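        Each request's metadata dict is merged into a single dict with
        dict.update(), so a request processed later overrides earlier
        values for identical keys.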
""" metadata = {} for req in metadata_requests: metadata.update(req.metadata) return metadata def aggregate(self, deposit, requests): """Aggregate multiple data on deposit into one unified data dictionary. Args: deposit (Deposit): Deposit concerned by the data aggregation. requests ([DepositRequest]): List of associated requests which need aggregation. Returns: Dictionary of data representing the deposit to inject in swh. """ data = {} # Retrieve tarballs/metadata information metadata = self._aggregate_metadata(deposit, requests) # Read information metadata data['origin'] = { 'type': 'deposit', - 'url': deposit.client.url + deposit.external_id, + 'url': os.path.join(deposit.client.url.rstrip('/'), + deposit.external_id), } # revision fullname = deposit.client.get_full_name() author_committer = { 'name': deposit.client.last_name, 'fullname': fullname, 'email': deposit.client.email, } # metadata provider self.provider['provider_name'] = deposit.client.last_name self.provider['provider_url'] = deposit.client.url revision_type = 'tar' revision_msg = '%s: Deposit %s in collection %s' % ( fullname, deposit.id, deposit.collection.name) complete_date = identifiers.normalize_timestamp(deposit.complete_date) data['revision'] = { 'synthetic': True, 'date': complete_date, 'committer_date': complete_date, 'author': author_committer, 'committer': author_committer, 'type': revision_type, 'message': revision_msg, 'metadata': metadata, } parent_revision = previous_revision_id(deposit.swh_id) if parent_revision: data['revision'] = { 'parents': [hashutil.hash_to_bytes(parent_revision)] } data['occurrence'] = { 'branch': 'master' } data['origin_metadata'] = { 'provider': self.provider, 'tool': self.tool, 'metadata': metadata } return data def process_get(self, req, collection_name, deposit_id): deposit = Deposit.objects.get(pk=deposit_id) requests = DepositRequest.objects.filter( deposit=deposit, type=self.deposit_request_types['metadata']) data = self.aggregate(deposit, requests) d = {} if data: d = json.dumps(data) return status.HTTP_200_OK, d, 'application/json' diff --git a/swh/deposit/injection/loader.py b/swh/deposit/injection/loader.py index bdae904a..1e5d5bc6 100644 --- a/swh/deposit/injection/loader.py +++ b/swh/deposit/injection/loader.py @@ -1,197 +1,198 @@ # Copyright (C) 2015-2017 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import datetime import os import requests import tempfile from swh.model import hashutil from swh.loader.tar import loader from swh.loader.core.loader import SWHLoader class DepositClient: """Deposit client to read archive, metadata or update deposit's status. """ def read_archive_to(self, archive_update_url, archive_path, log=None): """Retrieve the archive from the deposit to a local directory. Args: archive_update_url (str): The full deposit archive(s)'s raw content to retrieve locally archive_path (str): the local archive's path where to store the raw content Returns: The archive path to the local archive to load. Or None if any problem arose. 
""" r = requests.get(archive_update_url, stream=True) if r.ok: with open(archive_path, 'wb') as f: for chunk in r.iter_content(): f.write(chunk) return archive_path msg = 'Problem when retrieving deposit archive at %s' % ( archive_update_url, ) if log: log.error(msg) raise ValueError(msg) def read_metadata(self, metadata_url, log=None): """Retrieve the metadata information on a given deposit. Args: metadata_url (str): The full deposit metadata url to retrieve locally Returns: The dictionary of metadata for that deposit or None if any problem arose. """ r = requests.get(metadata_url) if r.ok: data = r.json() return data msg = 'Problem when retrieving metadata at %s' % metadata_url if log: log.error(msg) raise ValueError(msg) def update_status(self, update_status_url, status, revision_id=None): """Update the deposit's status. Args: update_status_url (str): the full deposit's archive status (str): The status to update the deposit with revision_id (str/None): the revision's identifier to update to """ payload = {'status': status} if revision_id: payload['revision_id'] = revision_id requests.put(update_status_url, json=payload) class DepositLoader(loader.TarLoader): """Deposit loader implementation. This is a subclass of the :class:TarLoader as the main goal of this class is to first retrieve the deposit's tarball contents as one and its associated metadata. Then provide said tarball to be loaded by the TarLoader. This will: - retrieves the deposit's archive locally - provide the archive to be loaded by the tar loader - clean up the temporary location used to retrieve the archive locally - update the deposit's status accordingly """ def __init__(self, client=None): super().__init__() if client: self.client = client else: self.client = DepositClient() def load(self, *, archive_url, deposit_meta_url, deposit_update_url): SWHLoader.load( self, archive_url=archive_url, deposit_meta_url=deposit_meta_url, deposit_update_url=deposit_update_url) def prepare(self, *, archive_url, deposit_meta_url, deposit_update_url): """Prepare the injection by first retrieving the deposit's raw archive content. """ self.deposit_update_url = deposit_update_url temporary_directory = tempfile.TemporaryDirectory() self.temporary_directory = temporary_directory archive_path = os.path.join(temporary_directory.name, 'archive.zip') archive = self.client.get_archive( archive_url, archive_path, log=self.log) metadata = self.client.get_metadata( deposit_meta_url, log=self.log) origin = metadata['origin'] visit_date = datetime.datetime.now(tz=datetime.timezone.utc) revision = metadata['revision'] occurrence = metadata['occurrence'] self.origin_metadata = metadata['origin_metadata'] self.prepare_metadata() self.client.update_deposit_status(deposit_update_url, 'injecting') super().prepare(tar_path=archive, origin=origin, visit_date=visit_date, revision=revision, occurrences=[occurrence]) def store_metadata(self): """Storing the origin_metadata during the load processus. Provider_id and tool_id are resolved during the prepare() method. """ origin_id = self.origin_id visit_date = self.visit_date provider_id = self.origin_metadata['provider']['provider_id'] tool_id = self.origin_metadata['tool']['tool_id'] metadata = self.origin_metadata['metadata'] try: self.send_origin_metadata(origin_id, visit_date, provider_id, tool_id, metadata) except: self.log.exception('Problem when storing origin_metadata') + raise def post_load(self, success=True): """Updating the deposit's status according to its loading status. 
If not successful, we update its status to failure. Otherwise, we update its status to 'success' and pass along its associated revision. """ try: if not success: self.client.update_deposit_status(self.deposit_update_url, status='failure') return # first retrieve the new revision [rev_id] = self.objects['revision'].keys() if rev_id: rev_id_hex = hashutil.hash_to_hex(rev_id) # then update the deposit's status to success with its # revision-id self.client.update_deposit_status(self.deposit_update_url, status='success', revision_id=rev_id_hex) except: self.log.exception( 'Problem when trying to update the deposit\'s status') def cleanup(self): """Clean up temporary directory where we retrieved the tarball. """ super().cleanup() self.temporary_directory.cleanup() diff --git a/swh/deposit/tests/test_loader.py b/swh/deposit/tests/test_loader.py index f87d1f0a..facead55 100644 --- a/swh/deposit/tests/test_loader.py +++ b/swh/deposit/tests/test_loader.py @@ -1,338 +1,319 @@ # Copyright (C) 2016-2017 The Software Heritage developers # See the AUTHORS file at the top-level directory of this distribution # License: GNU General Public License version 3, or any later version # See top-level LICENSE file for more information import json import os import unittest import shutil from nose.tools import istest from nose.plugins.attrib import attr from rest_framework.test import APITestCase from swh.model import hashutil from swh.deposit.injection.loader import DepositLoader, DepositClient from swh.deposit.config import PRIVATE_GET_RAW_CONTENT from swh.deposit.config import PRIVATE_GET_DEPOSIT_METADATA from swh.deposit.config import PRIVATE_PUT_DEPOSIT from django.core.urlresolvers import reverse from . import TEST_LOADER_CONFIG from .common import BasicTestCase, WithAuthTestCase, CommonCreationRoutine from .common import FileSystemCreationRoutine +TOOL_ID = 99 +PROVIDER_ID = 12 + + class DepositLoaderInhibitsStorage: """Mixin class to inhibit the persistence and keep in memory the data sent for storage. cf. SWHDepositLoaderNoStorage """ def __init__(self): super().__init__() # typed data self.state = { 'origin': [], 'origin_visit': [], 'origin_metadata': [], 'content': [], 'directory': [], 'revision': [], 'release': [], 'occurrence': [], 'tool': [], 'provider': [] } def _add(self, type, l): """Add without duplicates and keeping the insertion order. 
Args: type (str): Type of objects concerned by the action l ([object]): List of 'type' object """ col = self.state[type] for o in l: if o in col: continue col.extend([o]) def send_origin(self, origin): origin.update({'id': 1}) self._add('origin', [origin]) return origin['id'] def send_origin_visit(self, origin_id, visit_date): origin_visit = { 'origin': origin_id, 'visit_date': visit_date, 'visit': 1, } self._add('origin_visit', [origin_visit]) return origin_visit def send_origin_metadata(self, origin_id, visit_date, provider_id, tool_id, metadata): origin_metadata = { 'origin_id': origin_id, 'visit_date': visit_date, 'provider_id': provider_id, 'tool_id': tool_id, 'metadata': metadata } self._add('origin_metadata', [origin_metadata]) return origin_metadata - def send_tool(self, tool_name, tool_version, tool_configuration): + def send_tool(self, tool): tool = { - 'tool_name': tool_name, - 'tool_version': tool_version, - 'tool_configuration': tool_configuration + 'tool_name': tool['tool_name'], + 'tool_version': tool['tool_version'], + 'tool_configuration': tool['tool_configuration'] } self._add('tool', [tool]) - tool_id = len(self.state['tool']) + tool_id = TOOL_ID return tool_id - def send_provider(self, provider_name, provider_type, provider_url, - metadata): + def send_provider(self, provider): provider = { - 'provider_name': provider_name, - 'provider_type': provider_type, - 'provider_url': provider_url, - 'metadata': metadata + 'provider_name': provider['provider_name'], + 'provider_type': provider['provider_type'], + 'provider_url': provider['provider_url'], + 'metadata': provider['metadata'] } self._add('provider', [provider]) - provider_id = len(self.state['provider']) + provider_id = PROVIDER_ID return provider_id def maybe_load_contents(self, contents): self._add('content', contents) def maybe_load_directories(self, directories): self._add('directory', directories) def maybe_load_revisions(self, revisions): self._add('revision', revisions) def maybe_load_releases(self, releases): self._add('release', releases) def maybe_load_occurrences(self, occurrences): self._add('occurrence', occurrences) def open_fetch_history(self): pass def close_fetch_history_failure(self, fetch_history_id): pass def close_fetch_history_success(self, fetch_history_id): pass def update_origin_visit(self, origin_id, visit, status): self.status = status # Override to do nothing at the end def close_failure(self): pass def close_success(self): pass - def prepare_metadata(self): - origin_metadata = self.origin_metadata - - tool = origin_metadata['tool'] - tool_id = len(self.state['tool']) - if tool_id <= 0: - tool_id = self.send_tool(tool['tool_name'], - tool['tool_version'], - tool['tool_configuration']) - self.origin_metadata['tool']['tool_id'] = tool_id - - provider = origin_metadata['provider'] - provider_id = len(self.state['provider']) - if provider_id <= 0: - provider_id = self.send_provider(provider['provider_name'], - provider['provider_type'], - provider['provider_url'], - provider['metadata']) - self.origin_metadata['provider']['provider_id'] = provider_id - class TestLoaderUtils(unittest.TestCase): def assertRevisionsOk(self, expected_revisions): """Check the loader's revisions match the expected revisions. Expects self.loader to be instantiated and ready to be inspected (meaning the loading took place). Args: expected_revisions (dict): Dict with key revision id, value the targeted directory id. 
""" # The last revision being the one used later to start back from for rev in self.loader.state['revision']: rev_id = hashutil.hash_to_hex(rev['id']) directory_id = hashutil.hash_to_hex(rev['directory']) self.assertEquals(expected_revisions[rev_id], directory_id) class SWHDepositLoaderNoStorage(DepositLoaderInhibitsStorage, DepositLoader): """Loader to test. It inherits from the actual deposit loader to actually test its correct behavior. It also inherits from DepositLoaderInhibitsStorageLoader so that no persistence takes place. """ pass @attr('fs') class DepositLoaderScenarioTest(APITestCase, WithAuthTestCase, BasicTestCase, CommonCreationRoutine, FileSystemCreationRoutine, TestLoaderUtils): def setUp(self): super().setUp() # create the extraction dir used by the loader os.makedirs(TEST_LOADER_CONFIG['extraction_dir'], exist_ok=True) self.server = 'http://localhost/' # 1. create a deposit with archive and metadata self.deposit_id = self.create_simple_binary_deposit() me = self class SWHDepositTestClient(DepositClient): def get_archive(self, archive_update_url, archive_path, log=None): r = me.client.get(archive_update_url) # import os # os.makedirs(os.path.dirname(archive_path), exist_ok=True) with open(archive_path, 'wb') as f: for chunk in r.streaming_content: f.write(chunk) return archive_path def get_metadata(self, metadata_url, log=None): r = me.client.get(metadata_url) - data = json.loads(r.content.decode('utf-8')) - return data + return json.loads(r.content.decode('utf-8')) def update_deposit_status(self, update_status_url, status, revision_id=None): payload = {'status': status} if revision_id: payload['revision_id'] = revision_id me.client.put(update_status_url, content_type='application/json', data=json.dumps(payload)) # 2. setup loader with no persistence self.loader = SWHDepositLoaderNoStorage() # and a basic client which accesses the data # setuped in that test self.loader.client = SWHDepositTestClient() def tearDown(self): super().tearDown() shutil.rmtree(TEST_LOADER_CONFIG['extraction_dir']) @istest def inject_deposit_ready(self): """Load a deposit which is ready """ args = [self.collection.name, self.deposit_id] archive_url = reverse(PRIVATE_GET_RAW_CONTENT, args=args) deposit_meta_url = reverse(PRIVATE_GET_DEPOSIT_METADATA, args=args) deposit_update_url = reverse(PRIVATE_PUT_DEPOSIT, args=args) # when self.loader.load(archive_url=archive_url, deposit_meta_url=deposit_meta_url, deposit_update_url=deposit_update_url) # then self.assertEquals(len(self.loader.state['content']), 1) self.assertEquals(len(self.loader.state['directory']), 1) self.assertEquals(len(self.loader.state['revision']), 1) self.assertEquals(len(self.loader.state['release']), 0) self.assertEquals(len(self.loader.state['occurrence']), 1) # FIXME enrich state introspection # expected_revisions = {} # self.assertRevisionsOk(expected_revisions) @istest def inject_deposit_verify_metadata(self): """Load a deposit with metadata, test metadata integrity """ self.deposit_metadata_id = self.add_metadata_to_deposit( self.deposit_id) args = [self.collection.name, self.deposit_metadata_id] archive_url = reverse(PRIVATE_GET_RAW_CONTENT, args=args) deposit_meta_url = reverse(PRIVATE_GET_DEPOSIT_METADATA, args=args) deposit_update_url = reverse(PRIVATE_PUT_DEPOSIT, args=args) # when self.loader.load(archive_url=archive_url, deposit_meta_url=deposit_meta_url, deposit_update_url=deposit_update_url) # then self.assertEquals(len(self.loader.state['content']), 1) self.assertEquals(len(self.loader.state['directory']), 1) 
        self.assertEquals(len(self.loader.state['revision']), 1)
        self.assertEquals(len(self.loader.state['release']), 0)
        self.assertEquals(len(self.loader.state['occurrence']), 1)
        self.assertEquals(len(self.loader.state['origin_metadata']), 1)
        self.assertEquals(len(self.loader.state['tool']), 1)
        self.assertEquals(len(self.loader.state['provider']), 1)

        atom = '{http://www.w3.org/2005/Atom}'
        codemeta = '{https://doi.org/10.5063/SCHEMA/CODEMETA-2.0}'
        expected_origin_metadata = {
            atom + 'author': {
                atom + 'email': 'hal@ccsd.cnrs.fr',
                atom + 'name': 'HAL'
            },
            codemeta + 'url':
                'https://hal-test.archives-ouvertes.fr/hal-01243065',
            codemeta + 'runtimePlatform': 'phpstorm',
            codemeta + 'license': {
                codemeta + 'name':
                    'CeCILL Free Software License Agreement v1.1'
            },
            codemeta + 'programmingLanguage': 'C',
            codemeta + 'applicationCategory': 'test',
            codemeta + 'dateCreated': '2017-05-03T16:08:47+02:00',
            codemeta + 'version': 1,
            atom + 'external_identifier': 'hal-01243065',
            atom + 'title': 'Composing a Web of Audio Applications',
            codemeta + 'description': 'this is the description',
            atom + 'id': 'hal-01243065',
            atom + 'client': 'hal',
            codemeta + 'keywords': 'DSP programming,Web',
            codemeta + 'developmentStatus': 'stable'
        }
-
-        self.assertEquals(self.loader.state['origin_metadata'][0]['metadata'],
-                          expected_origin_metadata)
-        expected_tool_id = self.loader.state['origin_metadata'][0]['tool_id']
-        self.assertEquals(expected_tool_id, 1)
+        result = self.loader.state['origin_metadata'][0]
+        self.assertEquals(result['metadata'], expected_origin_metadata)
+        self.assertEquals(result['tool_id'], TOOL_ID)
+        self.assertEquals(result['provider_id'], PROVIDER_ID)
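The origin URL construction in `deposit_read.py` switches from plain string concatenation to `os.path.join` on the client URL with any trailing slash stripped. A minimal sketch of the difference, using hypothetical client values (the URL and external id below are illustrative only, not taken from this changeset):

```python
import os

# Hypothetical deposit client values, for illustration only.
client_url = 'https://hal.archives-ouvertes.fr'   # no trailing slash
external_id = 'hal-01243065'

# Previous behaviour: plain concatenation silently drops the separator
# whenever the client URL does not end with '/'.
naive_url = client_url + external_id
# -> 'https://hal.archives-ouvertes.frhal-01243065'

# New behaviour: strip any trailing slash, then let os.path.join insert
# exactly one separator between the two parts.
origin_url = os.path.join(client_url.rstrip('/'), external_id)

assert origin_url == 'https://hal.archives-ouvertes.fr/hal-01243065'
```

Since `os.path.join` uses the platform path separator, this relies on the loader running on a POSIX system; a `urllib.parse`-based join would be a platform-neutral alternative.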