diff --git a/requirements-swh.txt b/requirements-swh.txt
index e5b4b25..53e5adc 100644
--- a/requirements-swh.txt
+++ b/requirements-swh.txt
@@ -1,5 +1,5 @@
-swh.core >= 0.0.40
+swh.core >= 0.0.41
 swh.model >= 0.0.15
 swh.objstorage >= 0.0.13
 swh.scheduler >= 0.0.14
 swh.storage >= 0.0.102
diff --git a/swh/indexer/storage/__init__.py b/swh/indexer/storage/__init__.py
index 1bfaa5e..757dc43 100644
--- a/swh/indexer/storage/__init__.py
+++ b/swh/indexer/storage/__init__.py
@@ -1,541 +1,563 @@
 # Copyright (C) 2015-2018 The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU General Public License version 3, or any later version
 # See top-level LICENSE file for more information
 
 import json
 import psycopg2
 
 from collections import defaultdict
 
+from swh.core.api import remote_api_endpoint
 from swh.storage.common import db_transaction_generator, db_transaction
 from swh.storage.exc import StorageDBError
 
 from .db import Db
 from . import converters
 
 
 INDEXER_CFG_KEY = 'indexer_storage'
 
 
 def get_indexer_storage(cls, args):
     """Get an indexer storage object of class `storage_class` with
     arguments `storage_args`.
 
     Args:
         args (dict): dictionary with keys:
 
         - cls (str): storage's class, either 'local' or 'remote'
         - args (dict): dictionary with keys
 
     Returns:
         an instance of swh.indexer's storage (either local or remote)
 
     Raises:
         ValueError if passed an unknown storage class.
 
     """
     if cls == 'remote':
         from .api.client import RemoteStorage as IndexerStorage
     elif cls == 'local':
         from . import IndexerStorage
     else:
         raise ValueError('Unknown indexer storage class `%s`' % cls)
 
     return IndexerStorage(**args)
 
 
-class IndexerStorage():
+class IndexerStorage:
     """SWH Indexer Storage
 
     """
     def __init__(self, db, min_pool_conns=1, max_pool_conns=10):
         """
         Args:
             db_conn: either a libpq connection string, or a psycopg2
               connection
 
         """
         try:
             if isinstance(db, psycopg2.extensions.connection):
                 self._pool = None
                 self._db = Db(db)
             else:
                 self._pool = psycopg2.pool.ThreadedConnectionPool(
                     min_pool_conns, max_pool_conns, db
                 )
                 self._db = None
         except psycopg2.OperationalError as e:
             raise StorageDBError(e)
 
     def get_db(self):
         if self._db:
             return self._db
         return Db.from_pool(self._pool)
 
+    @remote_api_endpoint('check_config')
     def check_config(self, *, check_write):
         """Check that the storage is configured and ready to go."""
         # Check permissions on one of the tables
         with self.get_db().transaction() as cur:
             if check_write:
                 check = 'INSERT'
             else:
                 check = 'SELECT'
 
             cur.execute(
                 "select has_table_privilege(current_user, 'content_mimetype', %s)",  # noqa
                 (check,)
             )
             return cur.fetchone()[0]
 
         return True
 
+    @remote_api_endpoint('content_mimetype/missing')
     @db_transaction_generator()
     def content_mimetype_missing(self, mimetypes, db=None, cur=None):
         """List mimetypes missing from storage.
 
         Args:
             mimetypes (iterable): iterable of dict with keys:
 
               id (bytes): sha1 identifier
               indexer_configuration_id (int): tool used to compute
                 the results
 
         Yields:
             an iterable of missing id for the tuple (id,
             indexer_configuration_id)
 
         """
         for obj in db.content_mimetype_missing_from_list(mimetypes, cur):
             yield obj[0]
 
+    @remote_api_endpoint('content_mimetype/add')
     @db_transaction()
     def content_mimetype_add(self, mimetypes, conflict_update=False, db=None,
                              cur=None):
         """Add mimetypes not present in storage.
 
         Args:
             mimetypes (iterable): dictionaries with keys:
 
               id (bytes): sha1 identifier
               mimetype (bytes): raw content's mimetype
               encoding (bytes): raw content's encoding
               indexer_configuration_id (int): tool's id used to
                 compute the results
 
             conflict_update (bool): Flag to determine if we want to
               overwrite (true) or skip duplicates (false, the default)
 
         """
         db.mktemp_content_mimetype(cur)
         db.copy_to(mimetypes, 'tmp_content_mimetype',
                    ['id', 'mimetype', 'encoding',
                     'indexer_configuration_id'],
                    cur)
         db.content_mimetype_add_from_temp(conflict_update, cur)
 
+    @remote_api_endpoint('content_mimetype')
     @db_transaction_generator()
     def content_mimetype_get(self, ids, db=None, cur=None):
         """Retrieve full content mimetype per ids.
 
         Args:
             ids (iterable): sha1 identifier
 
         Yields:
             mimetypes (iterable): dictionaries with keys:
 
               id (bytes): sha1 identifier
               mimetype (bytes): raw content's mimetype
               encoding (bytes): raw content's encoding
               tool (dict): Tool used to compute the language
 
         """
         for c in db.content_mimetype_get_from_list(ids, cur):
             yield converters.db_to_mimetype(
                 dict(zip(db.content_mimetype_cols, c)))
 
+    @remote_api_endpoint('content_language/missing')
     @db_transaction_generator()
     def content_language_missing(self, languages, db=None, cur=None):
         """List languages missing from storage.
 
         Args:
             languages (iterable): dictionaries with keys:
 
               id (bytes): sha1 identifier
               indexer_configuration_id (int): tool used to compute
                 the results
 
         Yields:
             an iterable of missing id for the tuple (id,
             indexer_configuration_id)
 
         """
         for obj in db.content_language_missing_from_list(languages, cur):
             yield obj[0]
 
+    @remote_api_endpoint('content_language')
     @db_transaction_generator()
     def content_language_get(self, ids, db=None, cur=None):
         """Retrieve full content language per ids.
 
         Args:
             ids (iterable): sha1 identifier
 
         Yields:
             languages (iterable): dictionaries with keys:
 
               id (bytes): sha1 identifier
               lang (bytes): raw content's language
               tool (dict): Tool used to compute the language
 
         """
         for c in db.content_language_get_from_list(ids, cur):
             yield converters.db_to_language(
                 dict(zip(db.content_language_cols, c)))
 
+    @remote_api_endpoint('content_language/add')
     @db_transaction()
     def content_language_add(self, languages, conflict_update=False, db=None,
                              cur=None):
         """Add languages not present in storage.
 
         Args:
             languages (iterable): dictionaries with keys:
 
               id (bytes): sha1
               lang (bytes): language detected
 
             conflict_update (bool): Flag to determine if we want to
               overwrite (true) or skip duplicates (false, the default)
 
         """
         db.mktemp_content_language(cur)
         # empty language is mapped to 'unknown'
         db.copy_to(
             ({
                 'id': l['id'],
                 'lang': 'unknown' if not l['lang'] else l['lang'],
                 'indexer_configuration_id': l['indexer_configuration_id'],
             } for l in languages),
             'tmp_content_language',
             ['id', 'lang', 'indexer_configuration_id'], cur)
 
         db.content_language_add_from_temp(conflict_update, cur)
 
+    @remote_api_endpoint('content/ctags/missing')
     @db_transaction_generator()
     def content_ctags_missing(self, ctags, db=None, cur=None):
         """List ctags missing from storage.
 
         Args:
             ctags (iterable): dicts with keys:
 
               id (bytes): sha1 identifier
               indexer_configuration_id (int): tool used to compute
                 the results
 
         Yields:
             an iterable of missing id for the tuple (id,
             indexer_configuration_id)
 
         """
         for obj in db.content_ctags_missing_from_list(ctags, cur):
             yield obj[0]
 
+    @remote_api_endpoint('content/ctags')
     @db_transaction_generator()
     def content_ctags_get(self, ids, db=None, cur=None):
         """Retrieve ctags per id.
 
         Args:
             ids (iterable): sha1 checksums
 
         Yields:
             Dictionaries with keys:
 
               id (bytes): content's identifier
               name (str): symbol's name
               kind (str): symbol's kind
               language (str): language for that content
               tool (dict): tool used to compute the ctags' info
 
         """
         for c in db.content_ctags_get_from_list(ids, cur):
             yield converters.db_to_ctags(dict(zip(db.content_ctags_cols, c)))
 
+    @remote_api_endpoint('content/ctags/add')
     @db_transaction()
     def content_ctags_add(self, ctags, conflict_update=False, db=None,
                           cur=None):
         """Add ctags not present in storage
 
         Args:
             ctags (iterable): dictionaries with keys:
 
               id (bytes): sha1
               ctags ([list): List of dictionary with keys: name, kind,
                 line, language
 
         """
         def _convert_ctags(__ctags):
             """Convert ctags dict to list of ctags.
 
             """
             for ctags in __ctags:
                 yield from converters.ctags_to_db(ctags)
 
         db.mktemp_content_ctags(cur)
         db.copy_to(list(_convert_ctags(ctags)),
                    tblname='tmp_content_ctags',
                    columns=['id', 'name', 'kind', 'line',
                             'lang', 'indexer_configuration_id'],
                    cur=cur)
 
         db.content_ctags_add_from_temp(conflict_update, cur)
 
+    @remote_api_endpoint('content/ctags/search')
     @db_transaction_generator()
     def content_ctags_search(self, expression,
                              limit=10, last_sha1=None, db=None, cur=None):
         """Search through content's raw ctags symbols.
 
         Args:
             expression (str): Expression to search for
             limit (int): Number of rows to return (default to 10).
             last_sha1 (str): Offset from which retrieving data (default to '').
 
         Yields:
             rows of ctags including id, name, lang, kind, line, etc...
 
         """
         for obj in db.content_ctags_search(expression, last_sha1, limit,
                                            cur=cur):
             yield converters.db_to_ctags(dict(zip(db.content_ctags_cols, obj)))
 
+    @remote_api_endpoint('content/fossology_license')
     @db_transaction_generator()
     def content_fossology_license_get(self, ids, db=None, cur=None):
         """Retrieve licenses per id.
 
         Args:
             ids (iterable): sha1 checksums
 
         Yields:
             list: dictionaries with the following keys:
 
               id (bytes)
               licenses ([str]): associated licenses for that content
               tool (dict): Tool used to compute the license
 
         """
         d = defaultdict(list)
         for c in db.content_fossology_license_get_from_list(ids, cur):
             license = dict(zip(db.content_fossology_license_cols, c))
 
             id_ = license['id']
             d[id_].append(converters.db_to_fossology_license(license))
 
         for id_, facts in d.items():
             yield {id_: facts}
 
+    @remote_api_endpoint('content/fossology_license/add')
     @db_transaction()
     def content_fossology_license_add(self, licenses, conflict_update=False,
                                       db=None, cur=None):
         """Add licenses not present in storage.
 
         Args:
             licenses (iterable): dictionaries with keys:
 
               - id: sha1
               - license ([bytes]): List of licenses associated to sha1
               - tool (str): nomossa
 
             conflict_update: Flag to determine if we want to overwrite (true)
               or skip duplicates (false, the default)
 
         Returns:
             list: content_license entries which failed due to unknown licenses
 
         """
         # Then, we add the correct ones
         db.mktemp_content_fossology_license(cur)
         db.copy_to(
             ({
                 'id': sha1['id'],
                 'indexer_configuration_id': sha1['indexer_configuration_id'],
                 'license': license,
               } for sha1 in licenses
                 for license in sha1['licenses']),
             tblname='tmp_content_fossology_license',
             columns=['id', 'license', 'indexer_configuration_id'],
             cur=cur)
         db.content_fossology_license_add_from_temp(conflict_update, cur)
 
+    @remote_api_endpoint('content_metadata/missing')
     @db_transaction_generator()
     def content_metadata_missing(self, metadata, db=None, cur=None):
         """List metadata missing from storage.
 
         Args:
             metadata (iterable): dictionaries with keys:
 
               id (bytes): sha1 identifier
               indexer_configuration_id (int): tool used to compute
                 the results
 
         Yields:
             an iterable of missing id for the tuple (id,
             indexer_configuration_id)
 
         """
         for obj in db.content_metadata_missing_from_list(metadata, cur):
             yield obj[0]
 
+    @remote_api_endpoint('content_metadata')
     @db_transaction_generator()
     def content_metadata_get(self, ids, db=None, cur=None):
         """Retrieve metadata per id.
 
         Args:
             ids (iterable): sha1 checksums
 
         Yields:
             list: dictionaries with the following keys:
 
               id (bytes)
               translated_metadata (str): associated metadata
               tool (dict): tool used to compute metadata
 
         """
         for c in db.content_metadata_get_from_list(ids, cur):
             yield converters.db_to_metadata(
                 dict(zip(db.content_metadata_cols, c)))
 
+    @remote_api_endpoint('content_metadata/add')
     @db_transaction()
     def content_metadata_add(self, metadata, conflict_update=False, db=None,
                              cur=None):
         """Add metadata not present in storage.
 
         Args:
             metadata (iterable): dictionaries with keys:
 
               id: sha1
               translated_metadata: bytes / jsonb ?
 
             conflict_update: Flag to determine if we want to overwrite (true)
               or skip duplicates (false, the default)
 
         """
         db.mktemp_content_metadata(cur)
         # empty metadata is mapped to 'unknown'
         db.copy_to(metadata, 'tmp_content_metadata',
                    ['id', 'translated_metadata', 'indexer_configuration_id'],
                    cur)
         db.content_metadata_add_from_temp(conflict_update, cur)
 
+    @remote_api_endpoint('revision_metadata/missing')
     @db_transaction_generator()
     def revision_metadata_missing(self, metadata, db=None, cur=None):
         """List metadata missing from storage.
 
         Args:
             metadata (iterable): dictionaries with keys:
 
               id (bytes): sha1_git revision identifier
               indexer_configuration_id (int): tool used to compute
                 the results
 
         Returns:
             iterable: missing ids
 
         """
         for obj in db.revision_metadata_missing_from_list(metadata, cur):
             yield obj[0]
 
+    @remote_api_endpoint('revision_metadata')
     @db_transaction_generator()
     def revision_metadata_get(self, ids, db=None, cur=None):
         """Retrieve revision metadata per id.
 
         Args:
             ids (iterable): sha1 checksums
 
         Yields:
             list: dictionaries with the following keys:
 
               id (bytes)
               translated_metadata (str): associated metadata
               tool (dict): tool used to compute metadata
 
         """
         for c in db.revision_metadata_get_from_list(ids, cur):
             yield converters.db_to_metadata(
                 dict(zip(db.revision_metadata_cols, c)))
 
+    @remote_api_endpoint('revision_metadata/add')
     @db_transaction()
     def revision_metadata_add(self, metadata, conflict_update=False, db=None,
                               cur=None):
         """Add metadata not present in storage.
 
         Args:
             metadata (iterable): dictionaries with keys:
 
               - id: sha1_git of revision
               - translated_metadata: bytes / jsonb ?
 
             conflict_update: Flag to determine if we want to overwrite (true)
               or skip duplicates (false, the default)
 
         """
         db.mktemp_revision_metadata(cur)
         # empty metadata is mapped to 'unknown'
         db.copy_to(metadata, 'tmp_revision_metadata',
                    ['id', 'translated_metadata', 'indexer_configuration_id'],
                    cur)
         db.revision_metadata_add_from_temp(conflict_update, cur)
 
+    @remote_api_endpoint('indexer_configuration/add')
     @db_transaction_generator()
     def indexer_configuration_add(self, tools, db=None, cur=None):
         """Add new tools to the storage.
 
         Args:
             tools ([dict]): List of dictionary representing tool to
               insert in the db. Dictionary with the following keys::
 
                 tool_name (str): tool's name
                 tool_version (str): tool's version
                 tool_configuration (dict): tool's configuration (free form
                   dict)
 
         Returns:
             List of dict inserted in the db (holding the id key as
             well). The order of the list is not guaranteed to match
             the order of the initial list.
 
         """
         db.mktemp_indexer_configuration(cur)
         db.copy_to(tools, 'tmp_indexer_configuration',
                    ['tool_name', 'tool_version', 'tool_configuration'],
                    cur)
 
         tools = db.indexer_configuration_add_from_temp(cur)
         for line in tools:
             yield dict(zip(db.indexer_configuration_cols, line))
 
+    @remote_api_endpoint('indexer_configuration/data')
     @db_transaction()
     def indexer_configuration_get(self, tool, db=None, cur=None):
         """Retrieve tool information.
 
         Args:
             tool (dict): Dictionary representing a tool with the
               following keys::
 
                 tool_name (str): tool's name
                 tool_version (str): tool's version
                 tool_configuration (dict): tool's configuration (free form
                   dict)
 
         Returns:
             The identifier of the tool if it exists, None otherwise.
 
         """
         tool_conf = tool['tool_configuration']
         if isinstance(tool_conf, dict):
             tool_conf = json.dumps(tool_conf)
         idx = db.indexer_configuration_get(tool['tool_name'],
                                            tool['tool_version'],
                                            tool_conf)
         if not idx:
             return None
         return dict(zip(db.indexer_configuration_cols, idx))
diff --git a/swh/indexer/storage/api/client.py b/swh/indexer/storage/api/client.py
index 004d323..7dc616d 100644
--- a/swh/indexer/storage/api/client.py
+++ b/swh/indexer/storage/api/client.py
@@ -1,101 +1,20 @@
 # Copyright (C) 2015-2018 The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU General Public License version 3, or any later version
 # See top-level LICENSE file for more information
 
-
 from swh.core.api import SWHRemoteAPI
 
 from swh.storage.exc import StorageAPIError
 
+from .. import IndexerStorage
+
 
 class RemoteStorage(SWHRemoteAPI):
     """Proxy to a remote storage API"""
+
+    backend_class = IndexerStorage
+
     def __init__(self, url, timeout=None):
         super().__init__(
             api_exception=StorageAPIError, url=url, timeout=timeout)
-
-    def check_config(self, *, check_write):
-        return self.post('check_config', {'check_write': check_write})
-
-    def content_mimetype_add(self, mimetypes, conflict_update=False):
-        return self.post('content_mimetype/add', {
-            'mimetypes': mimetypes,
-            'conflict_update': conflict_update,
-        })
-
-    def content_mimetype_missing(self, mimetypes):
-        return self.post('content_mimetype/missing', {'mimetypes': mimetypes})
-
-    def content_mimetype_get(self, ids):
-        return self.post('content_mimetype', {'ids': ids})
-
-    def content_language_add(self, languages, conflict_update=False):
-        return self.post('content_language/add', {
-            'languages': languages,
-            'conflict_update': conflict_update,
-        })
-
-    def content_language_missing(self, languages):
-        return self.post('content_language/missing', {'languages': languages})
-
-    def content_language_get(self, ids):
-        return self.post('content_language', {'ids': ids})
-
-    def content_ctags_add(self, ctags, conflict_update=False):
-        return self.post('content/ctags/add', {
-            'ctags': ctags,
-            'conflict_update': conflict_update,
-        })
-
-    def content_ctags_missing(self, ctags):
-        return self.post('content/ctags/missing', {'ctags': ctags})
-
-    def content_ctags_get(self, ids):
-        return self.post('content/ctags', {'ids': ids})
-
-    def content_ctags_search(self, expression, limit=10, last_sha1=None):
-        return self.post('content/ctags/search', {
-            'expression': expression,
-            'limit': limit,
-            'last_sha1': last_sha1,
-        })
-
-    def content_fossology_license_add(self, licenses, conflict_update=False):
-        return self.post('content/fossology_license/add', {
-            'licenses': licenses,
-            'conflict_update': conflict_update,
-        })
-
-    def content_fossology_license_get(self, ids):
-        return self.post('content/fossology_license', {'ids': ids})
-
-    def content_metadata_add(self, metadata, conflict_update=False):
-        return self.post('content_metadata/add', {
-            'metadata': metadata,
-            'conflict_update': conflict_update,
-        })
-
-    def content_metadata_missing(self, metadata):
-        return self.post('content_metadata/missing', {'metadata': metadata})
-
-    def content_metadata_get(self, ids):
-        return self.post('content_metadata', {'ids': ids})
-
-    def revision_metadata_add(self, metadata, conflict_update=False):
-        return self.post('revision_metadata/add', {
-            'metadata': metadata,
-            'conflict_update': conflict_update,
-        })
-
-    def revision_metadata_missing(self, metadata):
-        return self.post('revision_metadata/missing', {'metadata': metadata})
-
-    def revision_metadata_get(self, ids):
-        return self.post('revision_metadata', {'ids': ids})
-
-    def indexer_configuration_add(self, tools):
-        return self.post('indexer_configuration/add', {'tools': tools})
-
-    def indexer_configuration_get(self, tool):
-        return self.post('indexer_configuration/data', {'tool': tool})
diff --git a/swh/indexer/storage/api/server.py b/swh/indexer/storage/api/server.py
index 4d64c72..912fccc 100644
--- a/swh/indexer/storage/api/server.py
+++ b/swh/indexer/storage/api/server.py
@@ -1,199 +1,75 @@
 # Copyright (C) 2015-2018 The Software Heritage developers
 # See the AUTHORS file at the top-level directory of this distribution
 # License: GNU General Public License version 3, or any later version
 # See top-level LICENSE file for more information
 
 import logging
 import click
 
-from flask import request
-
 from swh.core import config
-from swh.core.api import (SWHServerAPIApp, decode_request,
-                          error_handler,
+from swh.core.api import (SWHServerAPIApp, error_handler,
                           encode_data_server as encode_data)
 from swh.indexer.storage import get_indexer_storage, INDEXER_CFG_KEY
 
+from .. import IndexerStorage
+
 
 DEFAULT_CONFIG_PATH = 'storage/indexer'
 DEFAULT_CONFIG = {
     INDEXER_CFG_KEY: ('dict', {
         'cls': 'local',
         'args': {
             'db': 'dbname=softwareheritage-indexer-dev',
         },
     })
 }
 
 
-app = SWHServerAPIApp(__name__)
-storage = None
-
-
-@app.errorhandler(Exception)
-def my_error_handler(exception):
-    return error_handler(exception, encode_data)
-
-
 def get_storage():
     global storage
     if not storage:
         storage = get_indexer_storage(**app.config[INDEXER_CFG_KEY])
 
     return storage
 
 
-@app.route('/')
-def index():
-    return 'SWH Indexer Storage API server'
-
-
-@app.route('/check_config', methods=['POST'])
-def check_config():
-    return encode_data(get_storage().check_config(**decode_request(request)))
-
-
-@app.route('/content_mimetype/add', methods=['POST'])
-def content_mimetype_add():
-    return encode_data(
-        get_storage().content_mimetype_add(**decode_request(request)))
-
-
-@app.route('/content_mimetype/missing', methods=['POST'])
-def content_mimetype_missing():
-    return encode_data(
-        get_storage().content_mimetype_missing(**decode_request(request)))
-
-
-@app.route('/content_mimetype', methods=['POST'])
-def content_mimetype_get():
-    return encode_data(
-        get_storage().content_mimetype_get(**decode_request(request)))
-
-
-@app.route('/content_language/add', methods=['POST'])
-def content_language_add():
-    return encode_data(
-        get_storage().content_language_add(**decode_request(request)))
-
-
-@app.route('/content_language/missing', methods=['POST'])
-def content_language_missing():
-    return encode_data(
-        get_storage().content_language_missing(**decode_request(request)))
-
-
-@app.route('/content_language', methods=['POST'])
-def content_language_get():
-    return encode_data(
-        get_storage().content_language_get(**decode_request(request)))
-
-
-@app.route('/content/ctags/add', methods=['POST'])
-def content_ctags_add():
-    return encode_data(
-        get_storage().content_ctags_add(**decode_request(request)))
-
-
-@app.route('/content/ctags/search', methods=['POST'])
-def content_ctags_search():
-    return encode_data(
-        get_storage().content_ctags_search(**decode_request(request)))
-
-
-@app.route('/content/ctags/missing', methods=['POST'])
-def content_ctags_missing():
-    return encode_data(
-        get_storage().content_ctags_missing(**decode_request(request)))
-
-
-@app.route('/content/ctags', methods=['POST'])
-def content_ctags_get():
-    return encode_data(
-        get_storage().content_ctags_get(**decode_request(request)))
-
-
-@app.route('/content/fossology_license/add', methods=['POST'])
-def content_fossology_license_add():
-    return encode_data(
-        get_storage().content_fossology_license_add(**decode_request(request)))
-
-
-@app.route('/content/fossology_license', methods=['POST'])
-def content_fossology_license_get():
-    return encode_data(
-        get_storage().content_fossology_license_get(**decode_request(request)))
-
-
-@app.route('/indexer_configuration/data', methods=['POST'])
-def indexer_configuration_get():
-    return encode_data(get_storage().indexer_configuration_get(
-        **decode_request(request)))
-
-
-@app.route('/indexer_configuration/add', methods=['POST'])
-def indexer_configuration_add():
-    return encode_data(get_storage().indexer_configuration_add(
-        **decode_request(request)))
-
-
-@app.route('/content_metadata/add', methods=['POST'])
-def content_metadata_add():
-    return encode_data(
-        get_storage().content_metadata_add(**decode_request(request)))
-
-
-@app.route('/content_metadata/missing', methods=['POST'])
-def content_metadata_missing():
-    return encode_data(
-        get_storage().content_metadata_missing(**decode_request(request)))
-
-
-@app.route('/content_metadata', methods=['POST'])
-def content_metadata_get():
-    return encode_data(
-        get_storage().content_metadata_get(**decode_request(request)))
-
-
-@app.route('/revision_metadata/add', methods=['POST'])
-def revision_metadata_add():
-    return encode_data(
-        get_storage().revision_metadata_add(**decode_request(request)))
+app = SWHServerAPIApp(__name__,
+                      backend_class=IndexerStorage,
+                      backend_factory=get_storage)
+storage = None
 
 
-@app.route('/revision_metadata/missing', methods=['POST'])
-def revision_metadata_missing():
-    return encode_data(
-        get_storage().revision_metadata_missing(**decode_request(request)))
+@app.errorhandler(Exception)
+def my_error_handler(exception):
+    return error_handler(exception, encode_data)
 
 
-@app.route('/revision_metadata', methods=['POST'])
-def revision_metadata_get():
-    return encode_data(
-        get_storage().revision_metadata_get(**decode_request(request)))
+@app.route('/')
+def index():
+    return 'SWH Indexer Storage API server'
 
 
 def run_from_webserver(environ, start_response,
                        config_path=DEFAULT_CONFIG_PATH):
     """Run the WSGI app from the webserver, loading the configuration."""
     cfg = config.load_named_config(config_path, DEFAULT_CONFIG)
     app.config.update(cfg)
     handler = logging.StreamHandler()
     app.logger.addHandler(handler)
     return app(environ, start_response)
 
 
 @click.command()
 @click.option('--host', default='0.0.0.0', help="Host to run the server")
 @click.option('--port', default=5007, type=click.INT,
               help="Binding port of the server")
 @click.option('--debug/--nodebug', default=True,
               help="Indicates if the server should run in debug mode")
 def launch(host, port, debug):
     cfg = config.load_named_config(DEFAULT_CONFIG_PATH, DEFAULT_CONFIG)
     app.config.update(cfg)
     app.run(host, port=int(port), debug=bool(debug))
 
 
 if __name__ == '__main__':
     launch()
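
The net effect of this diff: each `IndexerStorage` method now declares its HTTP route once via `@remote_api_endpoint(...)`, and `swh.core.api` derives both the client proxy methods and the server routes from those declarations, which is why the hand-written `post()` wrappers and Flask handlers above could be deleted. Below is a toy sketch of that pattern; it is an illustration, not `swh.core`'s actual implementation, and the names `ExampleBackend` and `iter_endpoints` are hypothetical stand-ins.

```python
# Toy sketch of the declarative RPC pattern adopted by this diff.
# Not swh.core's real internals; ExampleBackend/iter_endpoints are
# hypothetical stand-ins for illustration only.

def remote_api_endpoint(path):
    """Attach the URL a backend method should be served under."""
    def decorator(meth):
        meth._endpoint_path = path
        return meth
    return decorator


class ExampleBackend:
    @remote_api_endpoint('thing/missing')
    def thing_missing(self, ids):
        return [i for i in ids if i % 2]


def iter_endpoints(backend_class):
    """Enumerate (url, method name) pairs, the way a server app would
    register routes and a client proxy would synthesize methods."""
    for name in dir(backend_class):
        meth = getattr(backend_class, name)
        if hasattr(meth, '_endpoint_path'):
            yield meth._endpoint_path, name


print(list(iter_endpoints(ExampleBackend)))
# [('thing/missing', 'thing_missing')]
```

The decorated backend method becomes the single source of truth: adding an endpoint no longer requires touching the backend, the client, and the server in lockstep.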
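On the client side the public API is unchanged: `RemoteStorage` now only declares `backend_class = IndexerStorage`, and `SWHRemoteAPI` synthesizes methods such as `content_mimetype_missing()` from the decorated declarations. A usage sketch, with the URL and ids purely illustrative:

```python
from swh.indexer.storage import get_indexer_storage

# Point the client at a running indexer storage server (URL illustrative).
storage = get_indexer_storage(cls='remote',
                              args={'url': 'http://localhost:5007/'})

# This method is generated from the @remote_api_endpoint declaration on
# IndexerStorage.content_mimetype_missing; it POSTs to
# 'content_mimetype/missing' exactly as the deleted wrapper did.
missing = storage.content_mimetype_missing([
    {'id': b'\x00' * 20, 'indexer_configuration_id': 1},
])
print(list(missing))
```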
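On the server side, `SWHServerAPIApp` generates the POST routes from `backend_class` and instantiates the backend lazily through `backend_factory`; only the error handler and the index page remain hand-written. A minimal sketch of serving the app for development, assuming a local database matching `DEFAULT_CONFIG` (normally `launch()` loads this from the named configuration file instead):

```python
from swh.indexer.storage import INDEXER_CFG_KEY
from swh.indexer.storage.api.server import app

# Equivalent to what launch() loads via config.load_named_config();
# the connection string is the development default from DEFAULT_CONFIG.
app.config[INDEXER_CFG_KEY] = {
    'cls': 'local',
    'args': {'db': 'dbname=softwareheritage-indexer-dev'},
}

app.run('0.0.0.0', port=5007, debug=True)
```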