diff --git a/swh/storage/api/client.py b/swh/storage/api/client.py
index db177ed..a01587e 100644
--- a/swh/storage/api/client.py
+++ b/swh/storage/api/client.py
@@ -1,214 +1,220 @@
# Copyright (C) 2015-2017 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information
from swh.core.api import SWHRemoteAPI
from ..exc import StorageAPIError
class RemoteStorage(SWHRemoteAPI):
"""Proxy to a remote storage API"""
def __init__(self, url):
super().__init__(api_exception=StorageAPIError, url=url)
def check_config(self, *, check_write):
return self.post('check_config', {'check_write': check_write})
def content_add(self, content):
return self.post('content/add', {'content': content})
def content_update(self, content, keys=[]):
return self.post('content/update', {'content': content,
'keys': keys})
def content_missing(self, content, key_hash='sha1'):
return self.post('content/missing', {'content': content,
'key_hash': key_hash})
def content_missing_per_sha1(self, contents):
return self.post('content/missing/sha1', {'contents': contents})
def content_get(self, content):
return self.post('content/data', {'content': content})
def content_get_metadata(self, content):
return self.post('content/metadata', {'content': content})
def content_find(self, content):
return self.post('content/present', {'content': content})
def directory_add(self, directories):
return self.post('directory/add', {'directories': directories})
def directory_missing(self, directories):
return self.post('directory/missing', {'directories': directories})
def directory_get(self, directories):
return self.post('directory', dict(directories=directories))
def directory_ls(self, directory, recursive=False):
return self.get('directory/ls', {'directory': directory,
'recursive': recursive})
def revision_get(self, revisions):
return self.post('revision', {'revisions': revisions})
def revision_get_by(self, origin_id, branch_name, timestamp, limit=None):
return self.post('revision/by', dict(origin_id=origin_id,
branch_name=branch_name,
timestamp=timestamp,
limit=limit))
def revision_log(self, revisions, limit=None):
return self.post('revision/log', {'revisions': revisions,
'limit': limit})
def revision_log_by(self, origin_id, branch_name, timestamp, limit=None):
return self.post('revision/logby', {'origin_id': origin_id,
'branch_name': branch_name,
'timestamp': timestamp,
'limit': limit})
def revision_shortlog(self, revisions, limit=None):
return self.post('revision/shortlog', {'revisions': revisions,
'limit': limit})
def revision_add(self, revisions):
return self.post('revision/add', {'revisions': revisions})
def revision_missing(self, revisions):
return self.post('revision/missing', {'revisions': revisions})
def release_add(self, releases):
return self.post('release/add', {'releases': releases})
def release_get(self, releases):
return self.post('release', {'releases': releases})
def release_get_by(self, origin_id, limit=None):
return self.post('release/by', dict(origin_id=origin_id,
limit=limit))
def release_missing(self, releases):
return self.post('release/missing', {'releases': releases})
def object_find_by_sha1_git(self, ids):
return self.post('object/find_by_sha1_git', {'ids': ids})
def occurrence_get(self, origin_id):
return self.post('occurrence', {'origin_id': origin_id})
def occurrence_add(self, occurrences):
return self.post('occurrence/add', {'occurrences': occurrences})
def snapshot_add(self, origin, visit, snapshot, back_compat=True):
return self.post('snapshot/add', {
'origin': origin, 'visit': visit, 'snapshot': snapshot,
'back_compat': back_compat})
def snapshot_get(self, snapshot_id):
return self.post('snapshot', {'snapshot_id': snapshot_id})
def snapshot_get_by_origin_visit(self, origin, visit):
return self.post('snapshot/by_origin_visit', {'origin': origin,
'visit': visit})
+ def snapshot_get_latest(self, origin, allowed_statuses=None):
+ return self.post('snapshot/latest', {
+ 'origin': origin,
+ 'allowed_statuses': allowed_statuses
+ })
+
def origin_get(self, origin):
return self.post('origin/get', {'origin': origin})
def origin_search(self, url_pattern, offset=0, limit=50, regexp=False):
return self.post('origin/search', {'url_pattern': url_pattern,
'offset': offset,
'limit': limit,
'regexp': regexp})
def origin_add(self, origins):
return self.post('origin/add_multi', {'origins': origins})
def origin_add_one(self, origin):
return self.post('origin/add', {'origin': origin})
def origin_visit_add(self, origin, ts):
return self.post('origin/visit/add', {'origin': origin, 'ts': ts})
def origin_visit_update(self, origin, visit_id, status, metadata=None):
return self.post('origin/visit/update', {'origin': origin,
'visit_id': visit_id,
'status': status,
'metadata': metadata})
def origin_visit_get(self, origin, last_visit=None, limit=None):
return self.post('origin/visit/get', {
'origin': origin, 'last_visit': last_visit, 'limit': limit})
def origin_visit_get_by(self, origin, visit):
return self.post('origin/visit/getby', {'origin': origin,
'visit': visit})
def person_get(self, person):
return self.post('person', {'person': person})
def fetch_history_start(self, origin_id):
return self.post('fetch_history/start', {'origin_id': origin_id})
def fetch_history_end(self, fetch_history_id, data):
return self.post('fetch_history/end',
{'fetch_history_id': fetch_history_id,
'data': data})
def fetch_history_get(self, fetch_history_id):
return self.get('fetch_history', {'id': fetch_history_id})
def entity_add(self, entities):
return self.post('entity/add', {'entities': entities})
def entity_get(self, uuid):
return self.post('entity/get', {'uuid': uuid})
def entity_get_one(self, uuid):
return self.get('entity', {'uuid': uuid})
def entity_get_from_lister_metadata(self, entities):
return self.post('entity/from_lister_metadata', {'entities': entities})
def stat_counters(self):
return self.get('stat/counters')
def directory_entry_get_by_path(self, directory, paths):
return self.post('directory/path', dict(directory=directory,
paths=paths))
def tool_add(self, tools):
return self.post('tool/add', {'tools': tools})
def tool_get(self, tool):
return self.post('tool/data', {'tool': tool})
def origin_metadata_add(self, origin_id, ts, provider, tool, metadata):
return self.post('origin/metadata/add', {'origin_id': origin_id,
'ts': ts,
'provider': provider,
'tool': tool,
'metadata': metadata})
def origin_metadata_get_by(self, origin_id, provider_type=None):
return self.post('origin/metadata/get', {
'origin_id': origin_id,
'provider_type': provider_type
})
def metadata_provider_add(self, provider_name, provider_type, provider_url,
metadata):
return self.post('provider/add', {'provider_name': provider_name,
'provider_type': provider_type,
'provider_url': provider_url,
'metadata': metadata})
def metadata_provider_get(self, provider_id):
return self.post('provider/get', {'provider_id': provider_id})
def metadata_provider_get_by(self, provider):
return self.post('provider/getby', {'provider': provider})
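Note on the client-side change above: the new snapshot_get_latest method simply forwards to the 'snapshot/latest' endpoint added on the server below. A minimal usage sketch, not part of the diff; the service URL, origin id and the 'full' status are illustrative assumptions:

    # Sketch only: URL, origin id and statuses below are hypothetical.
    from swh.storage.api.client import RemoteStorage

    storage = RemoteStorage('http://localhost:5002/')
    origin_id = 42  # hypothetical origin identifier
    # Only consider visits that completed successfully.
    snapshot = storage.snapshot_get_latest(origin_id,
                                           allowed_statuses=['full'])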
diff --git a/swh/storage/api/server.py b/swh/storage/api/server.py
index 9d7b556..0db44ad 100644
--- a/swh/storage/api/server.py
+++ b/swh/storage/api/server.py
@@ -1,368 +1,374 @@
# Copyright (C) 2015-2017 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information
import json
import logging
import click
from flask import g, request
from swh.core import config
from swh.storage import get_storage
from swh.core.api import (SWHServerAPIApp, decode_request,
error_handler,
encode_data_server as encode_data)
DEFAULT_CONFIG_PATH = 'storage/storage'
DEFAULT_CONFIG = {
'storage': ('dict', {
'cls': 'local',
'args': {
'db': 'dbname=softwareheritage-dev',
'objstorage': {
'cls': 'pathslicing',
'args': {
'root': '/srv/softwareheritage/objects',
'slicing': '0:2/2:4/4:6',
},
},
},
})
}
app = SWHServerAPIApp(__name__)
@app.errorhandler(Exception)
def my_error_handler(exception):
return error_handler(exception, encode_data)
@app.before_request
def before_request():
g.storage = get_storage(**app.config['storage'])
@app.route('/')
def index():
return 'SWH Storage API server'
@app.route('/check_config', methods=['POST'])
def check_config():
return encode_data(g.storage.check_config(**decode_request(request)))
@app.route('/content/missing', methods=['POST'])
def content_missing():
return encode_data(g.storage.content_missing(**decode_request(request)))
@app.route('/content/missing/sha1', methods=['POST'])
def content_missing_per_sha1():
return encode_data(g.storage.content_missing_per_sha1(
**decode_request(request)))
@app.route('/content/present', methods=['POST'])
def content_find():
return encode_data(g.storage.content_find(**decode_request(request)))
@app.route('/content/add', methods=['POST'])
def content_add():
return encode_data(g.storage.content_add(**decode_request(request)))
@app.route('/content/update', methods=['POST'])
def content_update():
return encode_data(g.storage.content_update(**decode_request(request)))
@app.route('/content/data', methods=['POST'])
def content_get():
return encode_data(g.storage.content_get(**decode_request(request)))
@app.route('/content/metadata', methods=['POST'])
def content_get_metadata():
return encode_data(g.storage.content_get_metadata(
**decode_request(request)))
@app.route('/directory', methods=['POST'])
def directory_get():
return encode_data(g.storage.directory_get(**decode_request(request)))
@app.route('/directory/missing', methods=['POST'])
def directory_missing():
return encode_data(g.storage.directory_missing(**decode_request(request)))
@app.route('/directory/add', methods=['POST'])
def directory_add():
return encode_data(g.storage.directory_add(**decode_request(request)))
@app.route('/directory/path', methods=['POST'])
def directory_entry_get_by_path():
return encode_data(g.storage.directory_entry_get_by_path(
**decode_request(request)))
@app.route('/directory/ls', methods=['GET'])
def directory_ls():
dir = request.args['directory'].encode('utf-8', 'surrogateescape')
rec = json.loads(request.args.get('recursive', 'False').lower())
return encode_data(g.storage.directory_ls(dir, recursive=rec))
@app.route('/revision/add', methods=['POST'])
def revision_add():
return encode_data(g.storage.revision_add(**decode_request(request)))
@app.route('/revision', methods=['POST'])
def revision_get():
return encode_data(g.storage.revision_get(**decode_request(request)))
@app.route('/revision/by', methods=['POST'])
def revision_get_by():
return encode_data(g.storage.revision_get_by(**decode_request(request)))
@app.route('/revision/log', methods=['POST'])
def revision_log():
return encode_data(g.storage.revision_log(**decode_request(request)))
@app.route('/revision/logby', methods=['POST'])
def revision_log_by():
return encode_data(g.storage.revision_log_by(**decode_request(request)))
@app.route('/revision/shortlog', methods=['POST'])
def revision_shortlog():
return encode_data(g.storage.revision_shortlog(**decode_request(request)))
@app.route('/revision/missing', methods=['POST'])
def revision_missing():
return encode_data(g.storage.revision_missing(**decode_request(request)))
@app.route('/release/add', methods=['POST'])
def release_add():
return encode_data(g.storage.release_add(**decode_request(request)))
@app.route('/release', methods=['POST'])
def release_get():
return encode_data(g.storage.release_get(**decode_request(request)))
@app.route('/release/by', methods=['POST'])
def release_get_by():
return encode_data(g.storage.release_get_by(**decode_request(request)))
@app.route('/release/missing', methods=['POST'])
def release_missing():
return encode_data(g.storage.release_missing(**decode_request(request)))
@app.route('/object/find_by_sha1_git', methods=['POST'])
def object_find_by_sha1_git():
return encode_data(g.storage.object_find_by_sha1_git(
**decode_request(request)))
@app.route('/occurrence', methods=['POST'])
def occurrence_get():
return encode_data(g.storage.occurrence_get(**decode_request(request)))
@app.route('/occurrence/add', methods=['POST'])
def occurrence_add():
return encode_data(g.storage.occurrence_add(**decode_request(request)))
@app.route('/snapshot/add', methods=['POST'])
def snapshot_add():
return encode_data(g.storage.snapshot_add(**decode_request(request)))
@app.route('/snapshot', methods=['POST'])
def snapshot_get():
return encode_data(g.storage.snapshot_get(**decode_request(request)))
@app.route('/snapshot/by_origin_visit', methods=['POST'])
def snapshot_get_by_origin_visit():
return encode_data(g.storage.snapshot_get_by_origin_visit(
**decode_request(request)))
+@app.route('/snapshot/latest', methods=['POST'])
+def snapshot_get_latest():
+ return encode_data(g.storage.snapshot_get_latest(
+ **decode_request(request)))
+
+
@app.route('/origin/get', methods=['POST'])
def origin_get():
return encode_data(g.storage.origin_get(**decode_request(request)))
@app.route('/origin/search', methods=['POST'])
def origin_search():
return encode_data(g.storage.origin_search(**decode_request(request)))
@app.route('/origin/add_multi', methods=['POST'])
def origin_add():
return encode_data(g.storage.origin_add(**decode_request(request)))
@app.route('/origin/add', methods=['POST'])
def origin_add_one():
return encode_data(g.storage.origin_add_one(**decode_request(request)))
@app.route('/origin/visit/get', methods=['POST'])
def origin_visit_get():
return encode_data(g.storage.origin_visit_get(**decode_request(request)))
@app.route('/origin/visit/getby', methods=['POST'])
def origin_visit_get_by():
return encode_data(
g.storage.origin_visit_get_by(**decode_request(request)))
@app.route('/origin/visit/add', methods=['POST'])
def origin_visit_add():
return encode_data(g.storage.origin_visit_add(**decode_request(request)))
@app.route('/origin/visit/update', methods=['POST'])
def origin_visit_update():
return encode_data(g.storage.origin_visit_update(
**decode_request(request)))
@app.route('/person', methods=['POST'])
def person_get():
return encode_data(g.storage.person_get(**decode_request(request)))
@app.route('/fetch_history', methods=['GET'])
def fetch_history_get():
return encode_data(g.storage.fetch_history_get(request.args['id']))
@app.route('/fetch_history/start', methods=['POST'])
def fetch_history_start():
return encode_data(
g.storage.fetch_history_start(**decode_request(request)))
@app.route('/fetch_history/end', methods=['POST'])
def fetch_history_end():
return encode_data(
g.storage.fetch_history_end(**decode_request(request)))
@app.route('/entity/add', methods=['POST'])
def entity_add():
return encode_data(
g.storage.entity_add(**decode_request(request)))
@app.route('/entity/get', methods=['POST'])
def entity_get():
return encode_data(
g.storage.entity_get(**decode_request(request)))
@app.route('/entity', methods=['GET'])
def entity_get_one():
return encode_data(g.storage.entity_get_one(request.args['uuid']))
@app.route('/entity/from_lister_metadata', methods=['POST'])
def entity_from_lister_metadata():
return encode_data(
g.storage.entity_get_from_lister_metadata(**decode_request(request)))
@app.route('/tool/data', methods=['POST'])
def tool_get():
return encode_data(g.storage.tool_get(
**decode_request(request)))
@app.route('/tool/add', methods=['POST'])
def tool_add():
return encode_data(g.storage.tool_add(
**decode_request(request)))
@app.route('/origin/metadata/add', methods=['POST'])
def origin_metadata_add():
return encode_data(g.storage.origin_metadata_add(**decode_request(
request)))
@app.route('/origin/metadata/get', methods=['POST'])
def origin_metadata_get_by():
return encode_data(g.storage.origin_metadata_get_by(**decode_request(
request)))
@app.route('/provider/add', methods=['POST'])
def metadata_provider_add():
return encode_data(g.storage.metadata_provider_add(**decode_request(
request)))
@app.route('/provider/get', methods=['POST'])
def metadata_provider_get():
return encode_data(g.storage.metadata_provider_get(**decode_request(
request)))
@app.route('/provider/getby', methods=['POST'])
def metadata_provider_get_by():
return encode_data(g.storage.metadata_provider_get_by(**decode_request(
request)))
@app.route('/stat/counters', methods=['GET'])
def stat_counters():
return encode_data(g.storage.stat_counters())
def run_from_webserver(environ, start_response,
config_path=DEFAULT_CONFIG_PATH):
"""Run the WSGI app from the webserver, loading the configuration."""
cfg = config.load_named_config(config_path, DEFAULT_CONFIG)
app.config.update(cfg)
handler = logging.StreamHandler()
app.logger.addHandler(handler)
return app(environ, start_response)
@click.command()
@click.argument('config-path', required=1)
@click.option('--host', default='0.0.0.0', help="Host to run the server")
@click.option('--port', default=5002, type=click.INT,
help="Binding port of the server")
@click.option('--debug/--nodebug', default=True,
help="Indicates if the server should run in debug mode")
def launch(config_path, host, port, debug):
app.config.update(config.read(config_path, DEFAULT_CONFIG))
app.run(host, port=int(port), debug=bool(debug))
if __name__ == '__main__':
launch()
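Note on the server-side change above: the new /snapshot/latest route follows the same convention as the other POST routes, with decode_request deserializing the body and the result splatted into the corresponding storage method. A minimal sketch exercising the route with Flask's test client; it assumes the app's 'storage' config has been loaded (cf. run_from_webserver) and that decode_request accepts JSON-encoded bodies:

    # Sketch only: config loading and JSON support are assumptions.
    import json
    from swh.storage.api.server import app

    client = app.test_client()
    response = client.post(
        '/snapshot/latest',
        data=json.dumps({'origin': 42, 'allowed_statuses': ['full']}),
        headers={'Content-Type': 'application/json'})
    print(response.status_code)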
diff --git a/swh/storage/db.py b/swh/storage/db.py
index f75572f..25166b6 100644
--- a/swh/storage/db.py
+++ b/swh/storage/db.py
@@ -1,1002 +1,1038 @@
# Copyright (C) 2015-2017 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information
import binascii
import datetime
import enum
import functools
import json
import os
import select
import threading
from contextlib import contextmanager
import psycopg2
import psycopg2.extras
TMP_CONTENT_TABLE = 'tmp_content'
psycopg2.extras.register_uuid()
def stored_procedure(stored_proc):
    """Decorator to execute a remote stored procedure, specified as argument.
Generally, the body of the decorated function should be empty. If it is
not, the stored procedure will be executed first; the function body then.
"""
def wrap(meth):
@functools.wraps(meth)
def _meth(self, *args, **kwargs):
cur = kwargs.get('cur', None)
self._cursor(cur).execute('SELECT %s()' % stored_proc)
meth(self, *args, **kwargs)
return _meth
return wrap
def jsonize(value):
"""Convert a value to a psycopg2 JSON object if necessary"""
if isinstance(value, dict):
return psycopg2.extras.Json(value)
return value
def entry_to_bytes(entry):
"""Convert an entry coming from the database to bytes"""
if isinstance(entry, memoryview):
return entry.tobytes()
if isinstance(entry, list):
return [entry_to_bytes(value) for value in entry]
return entry
def line_to_bytes(line):
"""Convert a line coming from the database to bytes"""
if not line:
return line
if isinstance(line, dict):
return {k: entry_to_bytes(v) for k, v in line.items()}
return line.__class__(entry_to_bytes(entry) for entry in line)
def cursor_to_bytes(cursor):
"""Yield all the data from a cursor as bytes"""
yield from (line_to_bytes(line) for line in cursor)
class BaseDb:
"""Base class for swh.storage.*Db.
cf. swh.storage.db.Db, swh.archiver.db.ArchiverDb
"""
@classmethod
def connect(cls, *args, **kwargs):
"""factory method to create a DB proxy
Accepts all arguments of psycopg2.connect; only some specific
possibilities are reported below.
Args:
connstring: libpq2 connection string
"""
conn = psycopg2.connect(*args, **kwargs)
return cls(conn)
def _cursor(self, cur_arg):
"""get a cursor: from cur_arg if given, or a fresh one otherwise
meant to avoid boilerplate if/then/else in methods that proxy stored
procedures
"""
if cur_arg is not None:
return cur_arg
# elif self.cur is not None:
# return self.cur
else:
return self.conn.cursor()
def __init__(self, conn):
"""create a DB proxy
Args:
conn: psycopg2 connection to the SWH DB
"""
self.conn = conn
@contextmanager
def transaction(self):
"""context manager to execute within a DB transaction
Yields:
a psycopg2 cursor
"""
with self.conn.cursor() as cur:
try:
yield cur
self.conn.commit()
except Exception:
if not self.conn.closed:
self.conn.rollback()
raise
def copy_to(self, items, tblname, columns, cur=None, item_cb=None):
"""Copy items' entries to table tblname with columns information.
Args:
            items (iterable): iterable of dictionaries mapping the column
                names to the data to copy over to tblname
            tblname (str): destination table's name
            columns ([str]): keys to access data in items and also the
                column names in the destination table.
            item_cb (fn): optional function to apply to each item's entry
"""
def escape(data):
if data is None:
return ''
if isinstance(data, bytes):
return '\\x%s' % binascii.hexlify(data).decode('ascii')
elif isinstance(data, str):
return '"%s"' % data.replace('"', '""')
elif isinstance(data, datetime.datetime):
# We escape twice to make sure the string generated by
# isoformat gets escaped
return escape(data.isoformat())
elif isinstance(data, dict):
return escape(json.dumps(data))
elif isinstance(data, list):
return escape("{%s}" % ','.join(escape(d) for d in data))
elif isinstance(data, psycopg2.extras.Range):
# We escape twice here too, so that we make sure
# everything gets passed to copy properly
return escape(
'%s%s,%s%s' % (
'[' if data.lower_inc else '(',
'-infinity' if data.lower_inf else escape(data.lower),
'infinity' if data.upper_inf else escape(data.upper),
']' if data.upper_inc else ')',
)
)
elif isinstance(data, enum.IntEnum):
return escape(int(data))
else:
# We don't escape here to make sure we pass literals properly
return str(data)
read_file, write_file = os.pipe()
def writer():
cursor = self._cursor(cur)
with open(read_file, 'r') as f:
cursor.copy_expert('COPY %s (%s) FROM STDIN CSV' % (
tblname, ', '.join(columns)), f)
write_thread = threading.Thread(target=writer)
write_thread.start()
with open(write_file, 'w') as f:
for d in items:
if item_cb is not None:
item_cb(d)
line = [escape(d.get(k)) for k in columns]
f.write(','.join(line))
f.write('\n')
write_thread.join()
def mktemp(self, tblname, cur=None):
self._cursor(cur).execute('SELECT swh_mktemp(%s)', (tblname,))
class Db(BaseDb):
"""Proxy to the SWH DB, with wrappers around stored procedures
"""
def mktemp_dir_entry(self, entry_type, cur=None):
self._cursor(cur).execute('SELECT swh_mktemp_dir_entry(%s)',
(('directory_entry_%s' % entry_type),))
@stored_procedure('swh_mktemp_revision')
def mktemp_revision(self, cur=None): pass
@stored_procedure('swh_mktemp_release')
def mktemp_release(self, cur=None): pass
@stored_procedure('swh_mktemp_occurrence_history')
def mktemp_occurrence_history(self, cur=None): pass
@stored_procedure('swh_mktemp_snapshot_branch')
def mktemp_snapshot_branch(self, cur=None): pass
@stored_procedure('swh_mktemp_entity_lister')
def mktemp_entity_lister(self, cur=None): pass
@stored_procedure('swh_mktemp_entity_history')
def mktemp_entity_history(self, cur=None): pass
@stored_procedure('swh_mktemp_bytea')
def mktemp_bytea(self, cur=None): pass
def register_listener(self, notify_queue, cur=None):
"""Register a listener for NOTIFY queue `notify_queue`"""
self._cursor(cur).execute("LISTEN %s" % notify_queue)
def listen_notifies(self, timeout):
"""Listen to notifications for `timeout` seconds"""
if select.select([self.conn], [], [], timeout) == ([], [], []):
return
else:
self.conn.poll()
while self.conn.notifies:
yield self.conn.notifies.pop(0)
@stored_procedure('swh_content_add')
def content_add_from_temp(self, cur=None): pass
@stored_procedure('swh_directory_add')
def directory_add_from_temp(self, cur=None): pass
@stored_procedure('swh_skipped_content_add')
def skipped_content_add_from_temp(self, cur=None): pass
@stored_procedure('swh_revision_add')
def revision_add_from_temp(self, cur=None): pass
@stored_procedure('swh_release_add')
def release_add_from_temp(self, cur=None): pass
@stored_procedure('swh_occurrence_history_add')
def occurrence_history_add_from_temp(self, cur=None): pass
@stored_procedure('swh_entity_history_add')
def entity_history_add_from_temp(self, cur=None): pass
def store_tmp_bytea(self, ids, cur=None):
"""Store the given identifiers in a new tmp_bytea table"""
cur = self._cursor(cur)
self.mktemp_bytea(cur)
self.copy_to(({'id': elem} for elem in ids), 'tmp_bytea',
['id'], cur)
def content_update_from_temp(self, keys_to_update, cur=None):
cur = self._cursor(cur)
cur.execute("""select swh_content_update(ARRAY[%s] :: text[])""" %
keys_to_update)
content_get_metadata_keys = [
'sha1', 'sha1_git', 'sha256', 'blake2s256', 'length', 'status']
skipped_content_keys = [
'sha1', 'sha1_git', 'sha256', 'blake2s256',
'length', 'reason', 'status', 'origin']
def content_get_metadata_from_temp(self, cur=None):
cur = self._cursor(cur)
cur.execute("""select t.id as sha1, %s from tmp_bytea t
left join content on t.id = content.sha1
""" % ', '.join(self.content_get_metadata_keys[1:]))
yield from cursor_to_bytes(cur)
def content_missing_from_temp(self, cur=None):
cur = self._cursor(cur)
cur.execute("""SELECT sha1, sha1_git, sha256, blake2s256
FROM swh_content_missing()""")
yield from cursor_to_bytes(cur)
def content_missing_per_sha1_from_temp(self, cur=None):
cur = self._cursor(cur)
cur.execute("""SELECT *
FROM swh_content_missing_per_sha1()""")
yield from cursor_to_bytes(cur)
def skipped_content_missing_from_temp(self, cur=None):
cur = self._cursor(cur)
cur.execute("""SELECT sha1, sha1_git, sha256, blake2s256
FROM swh_skipped_content_missing()""")
yield from cursor_to_bytes(cur)
def occurrence_get(self, origin_id, cur=None):
"""Retrieve latest occurrence's information by origin_id.
"""
cur = self._cursor(cur)
cur.execute("""SELECT origin, branch, target, target_type,
(select max(date) from origin_visit
where origin=%s) as date
FROM occurrence
WHERE origin=%s
""",
(origin_id, origin_id))
yield from cursor_to_bytes(cur)
def snapshot_exists(self, snapshot_id, cur=None):
"""Check whether a snapshot with the given id exists"""
cur = self._cursor(cur)
cur.execute("""SELECT 1 FROM snapshot where id=%s""", (snapshot_id,))
return bool(cur.fetchone())
def snapshot_add(self, origin, visit, snapshot_id, cur=None):
"""Add a snapshot for origin/visit from the temporary table"""
cur = self._cursor(cur)
cur.execute("""SELECT swh_snapshot_add(%s, %s, %s)""",
(origin, visit, snapshot_id))
snapshot_get_cols = ['snapshot_id', 'name', 'target', 'target_type']
def snapshot_get_by_id(self, snapshot_id, cur=None):
cur = self._cursor(cur)
query = """\
SELECT %s FROM swh_snapshot_get_by_id(%%s)
""" % ', '.join(self.snapshot_get_cols)
cur.execute(query, (snapshot_id,))
yield from cursor_to_bytes(cur)
def snapshot_get_by_origin_visit(self, origin_id, visit_id, cur=None):
cur = self._cursor(cur)
query = """\
SELECT swh_snapshot_get_by_origin_visit(%s, %s)
"""
cur.execute(query, (origin_id, visit_id))
ret = cur.fetchone()
if ret:
return line_to_bytes(ret)[0]
content_find_cols = ['sha1', 'sha1_git', 'sha256', 'blake2s256', 'length',
'ctime', 'status']
    def content_find(self, sha1=None, sha1_git=None, sha256=None,
                     blake2s256=None, cur=None):
        """Find a content from any combination of the following checksums:
        sha1, sha1_git, sha256 or blake2s256.
        Args:
            sha1: sha1 content
            sha1_git: the sha1 computed `a la git` of the content
            sha256: sha256 content
            blake2s256: blake2s256 content
        Returns:
            The content's row (cf. content_find_cols) if found, or None.
"""
cur = self._cursor(cur)
cur.execute("""SELECT %s
FROM swh_content_find(%%s, %%s, %%s, %%s)
LIMIT 1""" % ','.join(self.content_find_cols),
(sha1, sha1_git, sha256, blake2s256))
content = line_to_bytes(cur.fetchone())
if set(content) == {None}:
return None
else:
return content
def directory_get_from_temp(self, cur=None):
cur = self._cursor(cur)
cur.execute('''SELECT id, file_entries, dir_entries, rev_entries
FROM swh_directory_get()''')
yield from cursor_to_bytes(cur)
def directory_missing_from_temp(self, cur=None):
cur = self._cursor(cur)
cur.execute('SELECT * FROM swh_directory_missing()')
yield from cursor_to_bytes(cur)
directory_ls_cols = ['dir_id', 'type', 'target', 'name', 'perms',
'status', 'sha1', 'sha1_git', 'sha256', 'length']
def directory_walk_one(self, directory, cur=None):
cur = self._cursor(cur)
cols = ', '.join(self.directory_ls_cols)
query = 'SELECT %s FROM swh_directory_walk_one(%%s)' % cols
cur.execute(query, (directory,))
yield from cursor_to_bytes(cur)
def directory_walk(self, directory, cur=None):
cur = self._cursor(cur)
cols = ', '.join(self.directory_ls_cols)
query = 'SELECT %s FROM swh_directory_walk(%%s)' % cols
cur.execute(query, (directory,))
yield from cursor_to_bytes(cur)
def directory_entry_get_by_path(self, directory, paths, cur=None):
"""Retrieve a directory entry by path.
"""
cur = self._cursor(cur)
cols = ', '.join(self.directory_ls_cols)
query = (
'SELECT %s FROM swh_find_directory_entry_by_path(%%s, %%s)' % cols)
cur.execute(query, (directory, paths))
data = cur.fetchone()
if set(data) == {None}:
return None
return line_to_bytes(data)
def revision_missing_from_temp(self, cur=None):
cur = self._cursor(cur)
cur.execute('SELECT id FROM swh_revision_missing() as r(id)')
yield from cursor_to_bytes(cur)
revision_add_cols = [
'id', 'date', 'date_offset', 'date_neg_utc_offset', 'committer_date',
'committer_date_offset', 'committer_date_neg_utc_offset', 'type',
'directory', 'message', 'author_fullname', 'author_name',
'author_email', 'committer_fullname', 'committer_name',
'committer_email', 'metadata', 'synthetic',
]
revision_get_cols = revision_add_cols + [
'author_id', 'committer_id', 'parents']
def origin_visit_add(self, origin, ts, cur=None):
"""Add a new origin_visit for origin origin at timestamp ts with
status 'ongoing'.
Args:
origin: origin concerned by the visit
ts: the date of the visit
Returns:
The new visit index step for that origin
"""
cur = self._cursor(cur)
        cur.execute('SELECT swh_origin_visit_add(%s, %s)',
                    (origin, ts))
return cur.fetchone()[0]
def origin_visit_update(self, origin, visit_id, status,
metadata, cur=None):
"""Update origin_visit's status."""
cur = self._cursor(cur)
update = """UPDATE origin_visit
SET status=%s, metadata=%s
WHERE origin=%s AND visit=%s"""
cur.execute(update, (status, jsonize(metadata), origin, visit_id))
origin_visit_get_cols = ['origin', 'visit', 'date', 'status', 'metadata',
'snapshot']
def origin_visit_get_all(self, origin_id,
last_visit=None, limit=None, cur=None):
"""Retrieve all visits for origin with id origin_id.
Args:
            origin_id: the origin concerned
        Yields:
            The origin's visits
"""
cur = self._cursor(cur)
if last_visit:
extra_condition = 'and visit > %s'
args = (origin_id, last_visit, limit)
else:
extra_condition = ''
args = (origin_id, limit)
query = """\
SELECT %s,
(select id from snapshot where object_id = snapshot_id) as snapshot
FROM origin_visit
WHERE origin=%%s %s
order by date, visit asc
limit %%s""" % (
', '.join(self.origin_visit_get_cols[:-1]), extra_condition
)
cur.execute(query, args)
yield from cursor_to_bytes(cur)
def origin_visit_get(self, origin_id, visit_id, cur=None):
"""Retrieve information on visit visit_id of origin origin_id.
Args:
origin_id: the origin concerned
visit_id: The visit step for that origin
Returns:
The origin_visit information
"""
cur = self._cursor(cur)
query = """\
SELECT %s,
(select id from snapshot where object_id = snapshot_id)
as snapshot
FROM origin_visit
WHERE origin = %%s AND visit = %%s
""" % (', '.join(self.origin_visit_get_cols[:-1]))
cur.execute(query, (origin_id, visit_id))
r = cur.fetchall()
if not r:
return None
return line_to_bytes(r[0])
+ def origin_visit_get_latest_snapshot(self, origin_id,
+ allowed_statuses=None,
+ cur=None):
+ """Retrieve the most recent origin_visit which references a snapshot
+
+ Args:
+ origin_id: the origin concerned
+ allowed_statuses: the visit statuses allowed for the returned visit
+
+ Returns:
+ The origin_visit information, or None if no visit matches.
+ """
+ cur = self._cursor(cur)
+
+ extra_clause = ""
+ if allowed_statuses:
+ extra_clause = cur.mogrify("AND status IN %s",
+ (tuple(allowed_statuses),)).decode()
+
+ query = """\
+ SELECT %s,
+ (select id from snapshot where object_id = snapshot_id)
+ as snapshot
+ FROM origin_visit
+ WHERE
+ origin = %%s AND snapshot_id is not null %s
+ ORDER BY date DESC, visit DESC
+ LIMIT 1
+ """ % (', '.join(self.origin_visit_get_cols[:-1]), extra_clause)
+
+ cur.execute(query, (origin_id,))
+ r = cur.fetchone()
+ if not r:
+ return None
+ return line_to_bytes(r)
+
occurrence_cols = ['origin', 'branch', 'target', 'target_type']
def occurrence_by_origin_visit(self, origin_id, visit_id, cur=None):
"""Retrieve all occurrences for a particular origin_visit.
Args:
origin_id: the origin concerned
visit_id: The visit step for that origin
Yields:
            The visit's occurrences
"""
cur = self._cursor(cur)
query = """\
SELECT %s
FROM swh_occurrence_by_origin_visit(%%s, %%s)
""" % (', '.join(self.occurrence_cols))
cur.execute(query, (origin_id, visit_id))
yield from cursor_to_bytes(cur)
def revision_get_from_temp(self, cur=None):
cur = self._cursor(cur)
query = 'SELECT %s FROM swh_revision_get()' % (
', '.join(self.revision_get_cols))
cur.execute(query)
yield from cursor_to_bytes(cur)
def revision_log(self, root_revisions, limit=None, cur=None):
cur = self._cursor(cur)
query = """SELECT %s
FROM swh_revision_log(%%s, %%s)
""" % ', '.join(self.revision_get_cols)
cur.execute(query, (root_revisions, limit))
yield from cursor_to_bytes(cur)
revision_shortlog_cols = ['id', 'parents']
def revision_shortlog(self, root_revisions, limit=None, cur=None):
cur = self._cursor(cur)
query = """SELECT %s
FROM swh_revision_list(%%s, %%s)
""" % ', '.join(self.revision_shortlog_cols)
cur.execute(query, (root_revisions, limit))
yield from cursor_to_bytes(cur)
def release_missing_from_temp(self, cur=None):
cur = self._cursor(cur)
cur.execute('SELECT id FROM swh_release_missing() as r(id)')
yield from cursor_to_bytes(cur)
object_find_by_sha1_git_cols = ['sha1_git', 'type', 'id', 'object_id']
def object_find_by_sha1_git(self, ids, cur=None):
cur = self._cursor(cur)
self.store_tmp_bytea(ids, cur)
query = 'select %s from swh_object_find_by_sha1_git()' % (
', '.join(self.object_find_by_sha1_git_cols)
)
cur.execute(query)
yield from cursor_to_bytes(cur)
def stat_counters(self, cur=None):
cur = self._cursor(cur)
cur.execute('SELECT * FROM swh_stat_counters()')
yield from cur
fetch_history_cols = ['origin', 'date', 'status', 'result', 'stdout',
'stderr', 'duration']
def create_fetch_history(self, fetch_history, cur=None):
"""Create a fetch_history entry with the data in fetch_history"""
cur = self._cursor(cur)
query = '''INSERT INTO fetch_history (%s)
VALUES (%s) RETURNING id''' % (
','.join(self.fetch_history_cols),
','.join(['%s'] * len(self.fetch_history_cols))
)
cur.execute(query, [fetch_history.get(col) for col in
self.fetch_history_cols])
return cur.fetchone()[0]
def get_fetch_history(self, fetch_history_id, cur=None):
"""Get a fetch_history entry with the given id"""
cur = self._cursor(cur)
query = '''SELECT %s FROM fetch_history WHERE id=%%s''' % (
', '.join(self.fetch_history_cols),
)
cur.execute(query, (fetch_history_id,))
data = cur.fetchone()
if not data:
return None
ret = {'id': fetch_history_id}
for i, col in enumerate(self.fetch_history_cols):
ret[col] = data[i]
return ret
def update_fetch_history(self, fetch_history, cur=None):
"""Update the fetch_history entry from the data in fetch_history"""
cur = self._cursor(cur)
query = '''UPDATE fetch_history
SET %s
WHERE id=%%s''' % (
','.join('%s=%%s' % col for col in self.fetch_history_cols)
)
cur.execute(query, [jsonize(fetch_history.get(col)) for col in
self.fetch_history_cols + ['id']])
base_entity_cols = ['uuid', 'parent', 'name', 'type',
'description', 'homepage', 'active',
'generated', 'lister_metadata',
'metadata']
entity_cols = base_entity_cols + ['last_seen', 'last_id']
entity_history_cols = base_entity_cols + ['id', 'validity']
def origin_add(self, type, url, cur=None):
"""Insert a new origin and return the new identifier."""
insert = """INSERT INTO origin (type, url) values (%s, %s)
RETURNING id"""
cur.execute(insert, (type, url))
return cur.fetchone()[0]
origin_cols = ['id', 'type', 'url', 'lister', 'project']
def origin_get_with(self, type, url, cur=None):
"""Retrieve the origin id from its type and url if found."""
cur = self._cursor(cur)
query = """SELECT %s
FROM origin
WHERE type=%%s AND url=%%s
""" % ','.join(self.origin_cols)
cur.execute(query, (type, url))
data = cur.fetchone()
if data:
return line_to_bytes(data)
return None
def origin_get(self, id, cur=None):
"""Retrieve the origin per its identifier.
"""
cur = self._cursor(cur)
query = """SELECT %s
FROM origin WHERE id=%%s
""" % ','.join(self.origin_cols)
cur.execute(query, (id,))
data = cur.fetchone()
if data:
return line_to_bytes(data)
return None
def origin_search(self, url_pattern, offset=0, limit=50,
regexp=False, cur=None):
"""Search for origins whose urls contain a provided string pattern
or match a provided regular expression.
The search is performed in a case insensitive way.
Args:
url_pattern: the string pattern to search for in origin urls
offset: number of found origins to skip before returning results
limit: the maximum number of found origins to return
            regexp: if True, consider the provided pattern as a regular
                expression and return origins whose urls match it
"""
cur = self._cursor(cur)
origin_cols = ','.join(self.origin_cols)
query = """SELECT %s
FROM origin WHERE url %s %%s
ORDER BY id
OFFSET %%s LIMIT %%s"""
if not regexp:
query = query % (origin_cols, 'ILIKE')
query_params = ('%'+url_pattern+'%', offset, limit)
else:
query = query % (origin_cols, '~*')
query_params = (url_pattern, offset, limit)
cur.execute(query, query_params)
yield from cursor_to_bytes(cur)
person_cols = ['fullname', 'name', 'email']
person_get_cols = person_cols + ['id']
def person_add(self, person, cur=None):
"""Add a person identified by its name and email.
Returns:
The new person's id
"""
cur = self._cursor(cur)
query_new_person = '''\
INSERT INTO person(%s)
VALUES (%s)
RETURNING id''' % (
', '.join(self.person_cols),
', '.join('%s' for i in range(len(self.person_cols)))
)
cur.execute(query_new_person,
[person[col] for col in self.person_cols])
return cur.fetchone()[0]
def person_get(self, ids, cur=None):
"""Retrieve the persons identified by the list of ids.
"""
cur = self._cursor(cur)
query = """SELECT %s
FROM person
WHERE id IN %%s""" % ', '.join(self.person_get_cols)
cur.execute(query, (tuple(ids),))
yield from cursor_to_bytes(cur)
release_add_cols = [
'id', 'target', 'target_type', 'date', 'date_offset',
'date_neg_utc_offset', 'name', 'comment', 'synthetic',
'author_fullname', 'author_name', 'author_email',
]
release_get_cols = release_add_cols + ['author_id']
def release_get_from_temp(self, cur=None):
cur = self._cursor(cur)
query = '''
SELECT %s
FROM swh_release_get()
''' % ', '.join(self.release_get_cols)
cur.execute(query)
yield from cursor_to_bytes(cur)
def release_get_by(self,
origin_id,
limit=None,
cur=None):
"""Retrieve a release by occurrence criterion (only origin right now)
Args:
- origin_id: The origin to look for.
"""
cur = self._cursor(cur)
query = """
SELECT %s
FROM swh_release_get_by(%%s)
LIMIT %%s
""" % ', '.join(self.release_get_cols)
cur.execute(query, (origin_id, limit))
yield from cursor_to_bytes(cur)
def revision_get_by(self,
origin_id,
branch_name,
datetime,
limit=None,
cur=None):
"""Retrieve a revision by occurrence criterion.
Args:
- origin_id: The origin to look for
- branch_name: the branch name to look for
            - datetime: the lower bound of the timerange to look for
              (the upper bound being now).
            - limit: limit number of results to return
"""
cur = self._cursor(cur)
if branch_name and isinstance(branch_name, str):
branch_name = branch_name.encode('utf-8')
query = '''
SELECT %s
FROM swh_revision_get_by(%%s, %%s, %%s)
LIMIT %%s
''' % ', '.join(self.revision_get_cols)
cur.execute(query, (origin_id, branch_name, datetime, limit))
yield from cursor_to_bytes(cur)
def entity_get(self, uuid, cur=None):
"""Retrieve the entity and its parent hierarchy chain per uuid.
"""
cur = self._cursor(cur)
cur.execute("""SELECT %s
FROM swh_entity_get(%%s)""" % (
', '.join(self.entity_cols)),
(uuid, ))
yield from cursor_to_bytes(cur)
def entity_get_one(self, uuid, cur=None):
"""Retrieve a single entity given its uuid.
"""
cur = self._cursor(cur)
cur.execute("""SELECT %s
FROM entity
WHERE uuid = %%s""" % (
', '.join(self.entity_cols)),
(uuid, ))
data = cur.fetchone()
if not data:
return None
return line_to_bytes(data)
def origin_metadata_add(self, origin, ts, provider, tool,
metadata, cur=None):
""" Add an origin_metadata for the origin at ts with provider, tool and
metadata.
Args:
origin (int): the origin's id for which the metadata is added
ts (datetime): time when the metadata was found
provider (int): the metadata provider identifier
tool (int): the tool's identifier used to extract metadata
metadata (jsonb): the metadata retrieved at the time and location
Returns:
id (int): the origin_metadata unique id
"""
cur = self._cursor(cur)
insert = """INSERT INTO origin_metadata (origin_id, discovery_date,
provider_id, tool_id, metadata) values (%s, %s, %s, %s, %s)
RETURNING id"""
cur.execute(insert, (origin, ts, provider, tool, jsonize(metadata)))
return cur.fetchone()[0]
origin_metadata_get_cols = ['id', 'origin_id', 'discovery_date',
'tool_id', 'metadata', 'provider_id',
'provider_name', 'provider_type',
'provider_url']
def origin_metadata_get_by(self, origin_id, provider_type=None, cur=None):
"""Retrieve all origin_metadata entries for one origin_id
"""
cur = self._cursor(cur)
if not provider_type:
query = '''SELECT %s
FROM swh_origin_metadata_get_by_origin(
%%s)''' % (','.join(
self.origin_metadata_get_cols))
cur.execute(query, (origin_id, ))
else:
query = '''SELECT %s
FROM swh_origin_metadata_get_by_provider_type(
%%s, %%s)''' % (','.join(
self.origin_metadata_get_cols))
cur.execute(query, (origin_id, provider_type))
yield from cursor_to_bytes(cur)
tool_cols = ['id', 'name', 'version', 'configuration']
@stored_procedure('swh_mktemp_tool')
def mktemp_tool(self, cur=None):
pass
def tool_add_from_temp(self, cur=None):
cur = self._cursor(cur)
cur.execute("SELECT %s from swh_tool_add()" % (
','.join(self.tool_cols), ))
yield from cursor_to_bytes(cur)
def tool_get(self, name, version, configuration, cur=None):
cur = self._cursor(cur)
cur.execute('''select %s
from tool
where name=%%s and
version=%%s and
configuration=%%s''' % (
','.join(self.tool_cols)),
(name, version, configuration))
data = cur.fetchone()
if not data:
return None
return line_to_bytes(data)
metadata_provider_cols = ['id', 'provider_name', 'provider_type',
'provider_url', 'metadata']
def metadata_provider_add(self, provider_name, provider_type,
provider_url, metadata, cur=None):
"""Insert a new provider and return the new identifier."""
cur = self._cursor(cur)
insert = """INSERT INTO metadata_provider (provider_name, provider_type,
provider_url, metadata) values (%s, %s, %s, %s)
RETURNING id"""
cur.execute(insert, (provider_name, provider_type, provider_url,
jsonize(metadata)))
return cur.fetchone()[0]
def metadata_provider_get(self, provider_id, cur=None):
cur = self._cursor(cur)
cur.execute('''select %s
from metadata_provider
                       where id=%%s ''' % (
','.join(self.metadata_provider_cols)),
(provider_id, ))
data = cur.fetchone()
if not data:
return None
return line_to_bytes(data)
def metadata_provider_get_by(self, provider_name, provider_url,
cur=None):
cur = self._cursor(cur)
cur.execute('''select %s
from metadata_provider
where provider_name=%%s and
provider_url=%%s''' % (
','.join(self.metadata_provider_cols)),
(provider_name, provider_url))
data = cur.fetchone()
if not data:
return None
return line_to_bytes(data)
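Note on the db-layer change above: origin_visit_get_latest_snapshot builds its optional status filter with cur.mogrify, letting psycopg2 expand and quote the tuple of statuses into a SQL IN list before the fragment is interpolated into the query. A minimal sketch of that expansion; the connection string and statuses are illustrative:

    # Sketch only: psycopg2 adapts a Python tuple to a parenthesized list.
    import psycopg2

    conn = psycopg2.connect('dbname=softwareheritage-dev')
    cur = conn.cursor()
    fragment = cur.mogrify("AND status IN %s",
                           (tuple(['full', 'partial']),)).decode()
    print(fragment)  # AND status IN ('full', 'partial')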
diff --git a/swh/storage/storage.py b/swh/storage/storage.py
index d8b5e1a..8e0263d 100644
--- a/swh/storage/storage.py
+++ b/swh/storage/storage.py
@@ -1,1558 +1,1583 @@
# Copyright (C) 2015-2017 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information
from collections import defaultdict
import datetime
import itertools
import json
import dateutil.parser
import psycopg2
from . import converters
from .common import db_transaction_generator, db_transaction
from .db import Db
from .exc import StorageDBError
from swh.model.hashutil import ALGORITHMS
from swh.objstorage import get_objstorage
from swh.objstorage.exc import ObjNotFoundError
# Max block size of contents to return
BULK_BLOCK_CONTENT_LEN_MAX = 10000
CONTENT_HASH_KEYS = ['sha1', 'sha1_git', 'sha256', 'blake2s256']
class Storage():
"""SWH storage proxy, encompassing DB and object storage
"""
def __init__(self, db, objstorage):
"""
Args:
            db: either a libpq connection string, or a psycopg2 connection
            objstorage: configuration dict to instantiate the object storage
"""
try:
if isinstance(db, psycopg2.extensions.connection):
self.db = Db(db)
else:
self.db = Db.connect(db)
except psycopg2.OperationalError as e:
raise StorageDBError(e)
self.objstorage = get_objstorage(**objstorage)
def check_config(self, *, check_write):
"""Check that the storage is configured and ready to go."""
if not self.objstorage.check_config(check_write=check_write):
return False
# Check permissions on one of the tables
with self.db.transaction() as cur:
if check_write:
check = 'INSERT'
else:
check = 'SELECT'
cur.execute(
"select has_table_privilege(current_user, 'content', %s)",
(check,)
)
return cur.fetchone()[0]
return True
def content_add(self, content):
"""Add content blobs to the storage
Note: in case of DB errors, objects might have already been added to
the object storage and will not be removed. Since addition to the
object storage is idempotent, that should not be a problem.
Args:
content (iterable): iterable of dictionaries representing
individual pieces of content to add. Each dictionary has the
following keys:
- data (bytes): the actual content
- length (int): content length (default: -1)
- one key for each checksum algorithm in
:data:`swh.model.hashutil.ALGORITHMS`, mapped to the
corresponding checksum
- status (str): one of visible, hidden, absent
- reason (str): if status = absent, the reason why
- origin (int): if status = absent, the origin we saw the
content in
"""
db = self.db
def _unique_key(hash, keys=CONTENT_HASH_KEYS):
"""Given a hash (tuple or dict), return a unique key from the
aggregation of keys.
"""
if isinstance(hash, tuple):
return hash
return tuple([hash[k] for k in keys])
content_by_status = defaultdict(list)
for d in content:
if 'status' not in d:
d['status'] = 'visible'
if 'length' not in d:
d['length'] = -1
content_by_status[d['status']].append(d)
content_with_data = content_by_status['visible']
content_without_data = content_by_status['absent']
missing_content = set(self.content_missing(content_with_data))
missing_skipped = set(_unique_key(hashes) for hashes
in self.skipped_content_missing(
content_without_data))
with db.transaction() as cur:
if missing_content:
# create temporary table for metadata injection
db.mktemp('content', cur)
def add_to_objstorage(cont):
self.objstorage.add(cont['data'],
obj_id=cont['sha1'])
content_filtered = (cont for cont in content_with_data
if cont['sha1'] in missing_content)
db.copy_to(content_filtered, 'tmp_content',
db.content_get_metadata_keys,
cur, item_cb=add_to_objstorage)
# move metadata in place
db.content_add_from_temp(cur)
if missing_skipped:
missing_filtered = (cont for cont in content_without_data
if _unique_key(cont) in missing_skipped)
db.mktemp('skipped_content', cur)
db.copy_to(missing_filtered, 'tmp_skipped_content',
db.skipped_content_keys, cur)
# move metadata in place
db.skipped_content_add_from_temp(cur)
@db_transaction
def content_update(self, content, keys=[], cur=None):
"""Update content blobs to the storage. Does nothing for unknown
contents or skipped ones.
Args:
content (iterable): iterable of dictionaries representing
individual pieces of content to update. Each dictionary has the
following keys:
- data (bytes): the actual content
- length (int): content length (default: -1)
- one key for each checksum algorithm in
:data:`swh.model.hashutil.ALGORITHMS`, mapped to the
corresponding checksum
- status (str): one of visible, hidden, absent
            keys (list): List of keys (str) whose values need an update, e.g.,
                a new hash column
"""
db = self.db
# TODO: Add a check on input keys. How to properly implement
# this? We don't know yet the new columns.
db.mktemp('content')
select_keys = list(set(db.content_get_metadata_keys).union(set(keys)))
db.copy_to(content, 'tmp_content', select_keys, cur)
db.content_update_from_temp(keys_to_update=keys,
cur=cur)
def content_get(self, content):
"""Retrieve in bulk contents and their data.
Args:
            content: iterable of sha1s
Yields:
dict: Generates streams of contents as dict with their raw data:
- sha1: sha1's content
- data: bytes data of the content
Raises:
            ValueError: in case too many contents are required;
            cf. BULK_BLOCK_CONTENT_LEN_MAX
"""
# FIXME: Improve on server module to slice the result
if len(content) > BULK_BLOCK_CONTENT_LEN_MAX:
            raise ValueError(
                "Send at most %s contents." % BULK_BLOCK_CONTENT_LEN_MAX)
for obj_id in content:
try:
data = self.objstorage.get(obj_id)
except ObjNotFoundError:
yield None
continue
yield {'sha1': obj_id, 'data': data}
@db_transaction_generator
def content_get_metadata(self, content, cur=None):
"""Retrieve content metadata in bulk
Args:
content: iterable of content identifiers (sha1)
Returns:
an iterable with content metadata corresponding to the given ids
"""
db = self.db
db.store_tmp_bytea(content, cur)
for content_metadata in db.content_get_metadata_from_temp(cur):
yield dict(zip(db.content_get_metadata_keys, content_metadata))
@db_transaction_generator
def content_missing(self, content, key_hash='sha1', cur=None):
"""List content missing from storage
Args:
content: iterable of dictionaries containing one key for each
checksum algorithm in :data:`swh.model.hashutil.ALGORITHMS`,
mapped to the corresponding checksum, and a length key mapped
to the content length.
key_hash: the name of the hash used as key (default: 'sha1')
Returns:
iterable: missing ids
Raises:
TODO: an exception when we get a hash collision.
"""
db = self.db
keys = CONTENT_HASH_KEYS
if key_hash not in CONTENT_HASH_KEYS:
raise ValueError("key_hash should be one of %s" % keys)
key_hash_idx = keys.index(key_hash)
# Create temporary table for metadata injection
db.mktemp('content', cur)
db.copy_to(content, 'tmp_content', keys + ['length'], cur)
for obj in db.content_missing_from_temp(cur):
yield obj[key_hash_idx]
@db_transaction_generator
def content_missing_per_sha1(self, contents, cur=None):
"""List content missing from storage based only on sha1.
Args:
contents: Iterable of sha1 to check for absence.
Returns:
iterable: missing ids
Raises:
TODO: an exception when we get a hash collision.
"""
db = self.db
db.store_tmp_bytea(contents, cur)
for obj in db.content_missing_per_sha1_from_temp(cur):
yield obj[0]
@db_transaction_generator
def skipped_content_missing(self, content, cur=None):
"""List skipped_content missing from storage
Args:
content: iterable of dictionaries containing the data for each
checksum algorithm.
Returns:
iterable: missing signatures
"""
keys = CONTENT_HASH_KEYS
db = self.db
db.mktemp('skipped_content', cur)
db.copy_to(content, 'tmp_skipped_content',
keys + ['length', 'reason'], cur)
yield from db.skipped_content_missing_from_temp(cur)
@db_transaction
def content_find(self, content, cur=None):
"""Find a content hash in db.
Args:
content: a dictionary representing one content hash, mapping
checksum algorithm names (see swh.model.hashutil.ALGORITHMS) to
checksum values
Returns:
            a dictionary with the content's metadata (cf. content_find_cols)
            if the content exists, or None otherwise.
        Raises:
            ValueError: in case none of the dictionary's keys is one of
            sha1, sha1_git, sha256 or blake2s256.
"""
db = self.db
if not set(content).intersection(ALGORITHMS):
raise ValueError('content keys must contain at least one of: '
'sha1, sha1_git, sha256, blake2s256')
c = db.content_find(sha1=content.get('sha1'),
sha1_git=content.get('sha1_git'),
sha256=content.get('sha256'),
blake2s256=content.get('blake2s256'),
cur=cur)
if c:
return dict(zip(db.content_find_cols, c))
return None
def directory_add(self, directories):
"""Add directories to the storage
Args:
directories (iterable): iterable of dictionaries representing the
individual directories to add. Each dict has the following
keys:
- id (sha1_git): the id of the directory to add
- entries (list): list of dicts for each entry in the
directory. Each dict has the following keys:
- name (bytes)
- type (one of 'file', 'dir', 'rev'): type of the
directory entry (file, directory, revision)
- target (sha1_git): id of the object pointed at by the
directory entry
- perms (int): entry permissions
"""
dirs = set()
dir_entries = {
'file': defaultdict(list),
'dir': defaultdict(list),
'rev': defaultdict(list),
}
for cur_dir in directories:
dir_id = cur_dir['id']
dirs.add(dir_id)
for src_entry in cur_dir['entries']:
entry = src_entry.copy()
entry['dir_id'] = dir_id
dir_entries[entry['type']][dir_id].append(entry)
dirs_missing = set(self.directory_missing(dirs))
if not dirs_missing:
return
db = self.db
with db.transaction() as cur:
# Copy directory ids
dirs_missing_dict = ({'id': dir} for dir in dirs_missing)
db.mktemp('directory', cur)
db.copy_to(dirs_missing_dict, 'tmp_directory', ['id'], cur)
# Copy entries
for entry_type, entry_list in dir_entries.items():
entries = itertools.chain.from_iterable(
entries_for_dir
for dir_id, entries_for_dir
in entry_list.items()
if dir_id in dirs_missing)
db.mktemp_dir_entry(entry_type)
db.copy_to(
entries,
'tmp_directory_entry_%s' % entry_type,
['target', 'name', 'perms', 'dir_id'],
cur,
)
# Do the final copy
db.directory_add_from_temp(cur)
@db_transaction_generator
def directory_missing(self, directories, cur):
"""List directories missing from storage
Args:
directories (iterable): an iterable of directory ids
Yields:
missing directory ids
"""
db = self.db
# Create temporary table for metadata injection
db.mktemp('directory', cur)
directories_dicts = ({'id': dir} for dir in directories)
db.copy_to(directories_dicts, 'tmp_directory', ['id'], cur)
for obj in db.directory_missing_from_temp(cur):
yield obj[0]
@db_transaction_generator
def directory_get(self,
directories,
cur=None):
"""Get information on directories.
Args:
- directories: an iterable of directory ids
Returns:
List of directories as dict with keys and associated values.
"""
db = self.db
keys = ('id', 'dir_entries', 'file_entries', 'rev_entries')
db.mktemp('directory', cur)
db.copy_to(({'id': dir_id} for dir_id in directories),
'tmp_directory', ['id'], cur)
dirs = db.directory_get_from_temp(cur)
for line in dirs:
yield dict(zip(keys, line))
@db_transaction_generator
def directory_ls(self, directory, recursive=False, cur=None):
"""Get entries for one directory.
Args:
- directory: the directory to list entries from.
            - recursive: if True, list entries recursively from this directory.
Returns:
List of entries for such directory.
"""
db = self.db
if recursive:
res_gen = db.directory_walk(directory, cur=cur)
else:
res_gen = db.directory_walk_one(directory, cur=cur)
for line in res_gen:
yield dict(zip(db.directory_ls_cols, line))
@db_transaction
def directory_entry_get_by_path(self, directory, paths, cur=None):
"""Get the directory entry (either file or dir) from directory with path.
Args:
- directory: sha1 of the top level directory
- paths: path to lookup from the top level directory. From left
(top) to right (bottom).
Returns:
The corresponding directory entry if found, None otherwise.
"""
db = self.db
res = db.directory_entry_get_by_path(directory, paths, cur)
if res:
return dict(zip(db.directory_ls_cols, res))
def revision_add(self, revisions):
"""Add revisions to the storage
Args:
revisions (iterable): iterable of dictionaries representing the
individual revisions to add. Each dict has the following keys:
- id (sha1_git): id of the revision to add
- date (datetime.DateTime): date the revision was written
- date_offset (int): offset from UTC in minutes the revision
was written
- date_neg_utc_offset (boolean): whether a null date_offset
represents a negative UTC offset
- committer_date (datetime.DateTime): date the revision got
added to the origin
- committer_date_offset (int): offset from UTC in minutes the
revision was added to the origin
- committer_date_neg_utc_offset (boolean): whether a null
committer_date_offset represents a negative UTC offset
- type (one of 'git', 'tar'): type of the revision added
- directory (sha1_git): the directory the revision points at
- message (bytes): the message associated with the revision
- author_name (bytes): the name of the revision author
- author_email (bytes): the email of the revision author
- committer_name (bytes): the name of the revision committer
- committer_email (bytes): the email of the revision committer
- metadata (jsonb): extra information as dictionary
- synthetic (bool): revision's nature (tarball, directory
creates synthetic revision)
- parents (list of sha1_git): the parents of this revision
"""
db = self.db
revisions_missing = set(self.revision_missing(
set(revision['id'] for revision in revisions)))
if not revisions_missing:
return
with db.transaction() as cur:
db.mktemp_revision(cur)
revisions_filtered = (
converters.revision_to_db(revision) for revision in revisions
if revision['id'] in revisions_missing)
parents_filtered = []
db.copy_to(
revisions_filtered, 'tmp_revision', db.revision_add_cols,
cur,
lambda rev: parents_filtered.extend(rev['parents']))
db.revision_add_from_temp(cur)
db.copy_to(parents_filtered, 'revision_history',
['id', 'parent_id', 'parent_rank'], cur)
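# Illustrative usage sketch (editorial note, not part of the patch),
# mirroring the nested author/date shape used by the fixtures in
# test_storage.py below (converters.revision_to_db maps it to the flat
# keys documented above):
#
#   revision = {
#       'id': b'56789012345678901234',
#       'message': b'initial import',
#       'author': {'name': b'Jane', 'email': b'jane@example.com',
#                  'fullname': b'Jane <jane@example.com>'},
#       'date': {'timestamp': 1234567890, 'offset': 0,
#                'negative_utc': False},
#       'committer': {'name': b'Jane', 'email': b'jane@example.com',
#                     'fullname': b'Jane <jane@example.com>'},
#       'committer_date': {'timestamp': 1234567890, 'offset': 0,
#                          'negative_utc': False},
#       'parents': [], 'type': 'git', 'directory': dir_id,
#       'metadata': None, 'synthetic': False,
#   }
#   storage.revision_add([revision])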
@db_transaction_generator
def revision_missing(self, revisions, cur=None):
"""List revisions missing from storage
Args:
revisions (iterable): revision ids
Yields:
missing revision ids
"""
db = self.db
db.store_tmp_bytea(revisions, cur)
for obj in db.revision_missing_from_temp(cur):
yield obj[0]
@db_transaction_generator
def revision_get(self, revisions, cur=None):
"""Get all revisions from storage
Args:
revisions: an iterable of revision ids
Yields:
revisions as dictionaries (or None if the revision does not
exist)
"""
db = self.db
db.store_tmp_bytea(revisions, cur)
for line in self.db.revision_get_from_temp(cur):
data = converters.db_to_revision(
dict(zip(db.revision_get_cols, line))
)
if not data['type']:
yield None
continue
yield data
@db_transaction_generator
def revision_log(self, revisions, limit=None, cur=None):
"""Fetch revision entry from the given root revisions.
Args:
revisions: array of root revision to lookup
limit: limitation on the output result. Default to None.
Yields:
List of revision log from such revisions root.
"""
db = self.db
for line in db.revision_log(revisions, limit, cur):
data = converters.db_to_revision(
dict(zip(db.revision_get_cols, line))
)
if not data['type']:
yield None
continue
yield data
@db_transaction_generator
def revision_shortlog(self, revisions, limit=None, cur=None):
"""Fetch the shortlog for the given revisions
Args:
revisions: list of root revisions to look up
limit: depth limitation for the output
Yields:
(id, parents) tuples.
"""
db = self.db
yield from db.revision_shortlog(revisions, limit, cur)
@db_transaction_generator
def revision_log_by(self, origin_id, branch_name=None, timestamp=None,
limit=None, cur=None):
"""Fetch revision entry from the actual origin_id's latest revision.
Args:
origin_id: the origin id from which deriving the revision
branch_name: (optional) occurrence's branch name
timestamp: (optional) occurrence's time
limit: (optional) depth limitation for the
output. Default to None.
Yields:
The revision log starting from the revision derived from
the (origin, branch_name, timestamp) combination if any.
Returns:
None if no revision matching this combination is found.
"""
db = self.db
# Retrieve the revision by criterion
revisions = list(db.revision_get_by(
origin_id, branch_name, timestamp, limit=1))
if not revisions:
return None
revision_id = revisions[0][0]
# otherwise, retrieve the revision log from that revision
yield from self.revision_log([revision_id], limit)
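# Illustrative usage sketch (editorial note, not part of the patch):
# walking the history behind the latest b'master' occurrence of an
# origin, given its integer id `origin_id`:
#
#   for rev in storage.revision_log_by(origin_id, branch_name=b'master',
#                                      timestamp=None, limit=10):
#       print(rev['id'], rev['message'])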
def release_add(self, releases):
"""Add releases to the storage
Args:
releases (iterable): iterable of dictionaries representing the
individual releases to add. Each dict has the following keys:
- id (sha1_git): id of the release to add
- revision (sha1_git): id of the revision the release points to
- date (datetime.DateTime): the date the release was made
- date_offset (int): offset from UTC in minutes the release was
made
- date_neg_utc_offset (boolean): whether a null date_offset
represents a negative UTC offset
- name (bytes): the name of the release
- comment (bytes): the comment associated with the release
- author_name (bytes): the name of the release author
- author_email (bytes): the email of the release author
"""
db = self.db
release_ids = set(release['id'] for release in releases)
releases_missing = set(self.release_missing(release_ids))
if not releases_missing:
return
with db.transaction() as cur:
db.mktemp_release(cur)
releases_filtered = (
converters.release_to_db(release) for release in releases
if release['id'] in releases_missing
)
db.copy_to(releases_filtered, 'tmp_release', db.release_add_cols,
cur)
db.release_add_from_temp(cur)
@db_transaction_generator
def release_missing(self, releases, cur=None):
"""List releases missing from storage
Args:
releases: an iterable of release ids
Returns:
a list of missing release ids
"""
db = self.db
# Create temporary table for metadata injection
db.store_tmp_bytea(releases, cur)
for obj in db.release_missing_from_temp(cur):
yield obj[0]
@db_transaction_generator
def release_get(self, releases, cur=None):
"""Given a list of sha1, return the releases's information
Args:
releases: list of sha1s
Yields:
releases: list of releases as dicts with the following keys:
- id: origin's id
- revision: origin's type
- url: origin's url
- lister: lister's uuid
- project: project's uuid (FIXME, retrieve this information)
Raises:
ValueError: if the keys does not match (url and type) nor id.
"""
db = self.db
# Create temporary table for metadata injection
db.store_tmp_bytea(releases, cur)
for release in db.release_get_from_temp(cur):
yield converters.db_to_release(
dict(zip(db.release_get_cols, release))
)
@db_transaction
def snapshot_add(self, origin, visit, snapshot, back_compat=True,
cur=None):
"""Add a snapshot for the given origin/visit couple
Args:
origin (int): id of the origin
visit (int): id of the visit
snapshot (dict): the snapshot to add to the visit, containing the
following keys:
- **id** (:class:`bytes`): id of the snapshot
- **branches** (:class:`dict`): branches the snapshot contains,
mapping the branch name (:class:`bytes`) to the branch target,
itself a :class:`dict` (or ``None`` if the branch points to an
unknown object)
- **target_type** (:class:`str`): one of ``content``,
``directory``, ``revision``, ``release``,
``snapshot``, ``alias``
- **target** (:class:`bytes`): identifier of the target
(currently a ``sha1_git`` for all object kinds, or the name
of the target branch for aliases)
back_compat (bool): whether to add the occurrences for
backwards-compatibility
"""
db = self.db
if not db.snapshot_exists(snapshot['id'], cur):
db.mktemp_snapshot_branch(cur)
db.copy_to(
(
{
'name': name,
'target': info['target'] if info else None,
'target_type': info['target_type'] if info else None,
}
for name, info in snapshot['branches'].items()
),
'tmp_snapshot_branch',
['name', 'target', 'target_type'],
cur,
)
db.snapshot_add(origin, visit, snapshot['id'], cur)
if not back_compat:
return
# TODO: drop this compat feature
occurrences = []
for name, info in snapshot['branches'].items():
if not info:
target = b'\x00' * 20
target_type = 'revision'
elif info['target_type'] == 'alias':
continue
else:
target = info['target']
target_type = info['target_type']
occurrences.append({
'origin': origin,
'visit': visit,
'branch': name,
'target': target,
'target_type': target_type,
})
self.occurrence_add(occurrences)
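# Illustrative usage sketch (editorial note, not part of the patch):
# registering a one-branch snapshot for an existing origin visit, with
# hypothetical 20-byte identifiers:
#
#   storage.snapshot_add(origin_id, visit_id, {
#       'id': snapshot_id,
#       'branches': {
#           b'master': {'target': revision_id,
#                       'target_type': 'revision'},
#       },
#   })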
@db_transaction
def snapshot_get(self, snapshot_id, cur=None):
"""Get the snapshot with the given id
Args:
snapshot_id (bytes): id of the snapshot
Returns:
dict: a snapshot with two keys:
id:: identifier for the snapshot
branches:: a dictionary mapping each branch name to its target
information (or None for dangling branches)
"""
db = self.db
branches = {}
for branch in db.snapshot_get_by_id(snapshot_id, cur):
branch = dict(zip(db.snapshot_get_cols, branch))
del branch['snapshot_id']
name = branch.pop('name')
if branch == {'target': None, 'target_type': None}:
branch = None
branches[name] = branch
if branches:
return {'id': snapshot_id, 'branches': branches}
if db.snapshot_exists(snapshot_id, cur):
# empty snapshot
return {'id': snapshot_id, 'branches': {}}
return None
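# Illustrative usage sketch (editorial note, not part of the patch):
#
#   snapshot = storage.snapshot_get(snapshot_id)
#   if snapshot is not None:
#       for name, branch in snapshot['branches'].items():
#           pass  # branch is None when the branch dangles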
@db_transaction
def snapshot_get_by_origin_visit(self, origin, visit, cur=None):
"""Get the snapshot for the given origin visit
Args:
origin (int): the origin identifier
visit (int): the visit identifier
Returns:
dict: a snapshot with two keys:
id:: identifier for the snapshot
branches:: a dictionary containing the snapshot branch information
"""
db = self.db
snapshot_id = db.snapshot_get_by_origin_visit(origin, visit, cur)
if snapshot_id:
return self.snapshot_get(snapshot_id, cur=cur)
else:
# compatibility code during the snapshot migration
origin_visit_info = self.origin_visit_get_by(origin, visit,
cur=cur)
if origin_visit_info is None:
return None
ret = {'id': None}
ret['branches'] = origin_visit_info['occurrences']
return ret
+ @db_transaction
+ def snapshot_get_latest(self, origin, allowed_statuses=None, cur=None):
+ """Get the latest snapshot for the given origin, optionally only from visits
+ that have one of the given allowed_statuses.
+
+ Args:
+ origin (int): the origin identifier
+ allowed_statuses (list of str): list of visit statuses considered
+ to find the latest snapshot for the visit. For instance,
+ ``allowed_statuses=['full']`` will only consider visits that
+ have successfully run to completion.
+
+ Returns:
+ dict: a snapshot with two keys:
+ id:: identifier for the snapshot
+ branches:: a dictionary containing the snapshot branch information
+ """
+ db = self.db
+
+ origin_visit = db.origin_visit_get_latest_snapshot(
+ origin, allowed_statuses=allowed_statuses, cur=cur)
+ if origin_visit:
+ origin_visit = dict(zip(db.origin_visit_get_cols, origin_visit))
+ return self.snapshot_get(origin_visit['snapshot'], cur=cur)
+
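+ # Illustrative usage sketch (editorial note, not part of the patch):
+ # fetching the latest snapshot produced by a successfully completed
+ # visit of an origin:
+ #
+ #   snapshot = storage.snapshot_get_latest(origin_id,
+ #                                          allowed_statuses=['full'])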
@db_transaction
def occurrence_add(self, occurrences, cur=None):
"""Add occurrences to the storage
Args:
occurrences: iterable of dictionaries representing the individual
occurrences to add. Each dict has the following keys:
- origin (int): id of the origin corresponding to the
occurrence
- visit (int): id of the visit corresponding to the
occurrence
- branch (str): the reference name of the occurrence
- target (sha1_git): the id of the object pointed to by
the occurrence
- target_type (str): the type of object pointed to by the
occurrence
"""
db = self.db
db.mktemp_occurrence_history(cur)
db.copy_to(occurrences, 'tmp_occurrence_history',
['origin', 'branch', 'target', 'target_type', 'visit'], cur)
db.occurrence_history_add_from_temp(cur)
@db_transaction_generator
def occurrence_get(self, origin_id, cur=None):
"""Retrieve occurrence information per origin_id.
Args:
origin_id: The occurrence's origin.
Yields:
List of occurrences matching criterion.
"""
db = self.db
for line in db.occurrence_get(origin_id, cur):
yield {
'origin': line[0],
'branch': line[1],
'target': line[2],
'target_type': line[3],
}
@db_transaction
def origin_visit_add(self, origin, ts, cur=None):
"""Add an origin_visit for the origin at ts with status 'ongoing'.
Args:
origin: Visited Origin id
ts: timestamp of such visit
Returns:
dict: dictionary with keys origin and visit where:
- origin: origin identifier
- visit: the visit identifier for the new visit occurrence
"""
if isinstance(ts, str):
ts = dateutil.parser.parse(ts)
return {
'origin': origin,
'visit': self.db.origin_visit_add(origin, ts, cur)
}
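# Illustrative usage sketch (editorial note, not part of the patch):
#
#   import datetime
#   visit = storage.origin_visit_add(
#       origin_id, datetime.datetime.now(tz=datetime.timezone.utc))
#   # => {'origin': origin_id, 'visit': <id of the new visit>}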
@db_transaction
def origin_visit_update(self, origin, visit_id, status, metadata=None,
cur=None):
"""Update an origin_visit's status.
Args:
origin: Visited Origin id
visit_id: Visit's id
status: Visit's new status
metadata: Data associated with the visit
Returns:
None
"""
return self.db.origin_visit_update(origin, visit_id, status, metadata,
cur)
@db_transaction_generator
def origin_visit_get(self, origin, last_visit=None, limit=None, cur=None):
"""Retrieve all the origin's visit's information.
Args:
origin (int): The occurrence's origin (identifier).
last_visit (int): Starting point from which listing the next visits
Default to None
limit (int): Number of results to return from the last visit.
Default to None
Yields:
List of visits.
"""
db = self.db
for line in db.origin_visit_get_all(
origin, last_visit=last_visit, limit=limit, cur=cur):
data = dict(zip(self.db.origin_visit_get_cols, line))
yield data
@db_transaction
def origin_visit_get_by(self, origin, visit, cur=None):
"""Retrieve origin visit's information.
Args:
origin: The occurrence's origin (identifier).
Returns:
The information on that particular (origin, visit)
"""
db = self.db
ori_visit = db.origin_visit_get(origin, visit, cur)
if not ori_visit:
return None
ori_visit = dict(zip(self.db.origin_visit_get_cols, ori_visit))
if ori_visit['snapshot']:
ori_visit['occurrences'] = self.snapshot_get(ori_visit['snapshot'],
cur=cur)['branches']
return ori_visit
# TODO: remove Backwards compatibility after snapshot migration
occs = {}
for occ in db.occurrence_by_origin_visit(origin, visit):
_, branch_name, target, target_type = occ
occs[branch_name] = {
'target': target,
'target_type': target_type
}
ori_visit['occurrences'] = occs
return ori_visit
@db_transaction_generator
def revision_get_by(self,
origin_id,
branch_name=None,
timestamp=None,
limit=None,
cur=None):
"""Given an origin_id, retrieve occurrences' list per given criterions.
Args:
origin_id: The origin to filter on.
branch_name: (optional) branch name.
timestamp: (optional) time.
limit: (optional) limit
Yields:
List of occurrences matching the criterions or None if nothing is
found.
"""
for line in self.db.revision_get_by(origin_id,
branch_name,
timestamp,
limit=limit,
cur=cur):
data = converters.db_to_revision(
dict(zip(self.db.revision_get_cols, line))
)
if not data['type']:
yield None
continue
yield data
def release_get_by(self, origin_id, limit=None):
"""Given an origin id, return all the tag objects pointing to heads of
origin_id.
Args:
origin_id: the origin to filter on.
limit: None by default
Yields:
releases matching the criteria, or None if nothing is
found.
"""
for line in self.db.release_get_by(origin_id, limit=limit):
data = converters.db_to_release(
dict(zip(self.db.release_get_cols, line))
)
yield data
@db_transaction
def object_find_by_sha1_git(self, ids, cur=None):
"""Return the objects found with the given ids.
Args:
ids: a generator of sha1_gits
Returns:
dict: a mapping from id to the list of objects found. Each object
found is itself a dict with keys:
- sha1_git: the input id
- type: the type of object found
- id: the id of the object found
- object_id: the numeric id of the object found.
"""
db = self.db
ids = list(ids)  # may be a generator; it is iterated twice below
ret = {id: [] for id in ids}
for retval in db.object_find_by_sha1_git(ids):
if retval[1]:
ret[retval[0]].append(dict(zip(db.object_find_by_sha1_git_cols,
retval)))
return ret
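# Illustrative usage sketch (editorial note, not part of the patch):
# each input id maps to a (possibly empty) list of matches:
#
#   found = storage.object_find_by_sha1_git([id1, id2])
#   # e.g. {id1: [{'sha1_git': id1, 'type': 'revision', 'id': id1,
#   #              'object_id': 42}], id2: []}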
origin_keys = ['id', 'type', 'url', 'lister', 'project']
@db_transaction
def origin_get(self, origin, cur=None):
"""Return the origin either identified by its id or its tuple
(type, url).
Args:
origin: dictionary representing the individual origin to find.
This dict has either the keys type and url:
- type (FIXME: enum TBD): the origin type ('git', 'wget', ...)
- url (bytes): the url the origin points to
or the id:
- id: the origin id
Returns:
dict: the origin dictionary with the keys:
- id: origin's id
- type: origin's type
- url: origin's url
- lister: lister's uuid
- project: project's uuid (FIXME, retrieve this information)
Raises:
ValueError: if the dict contains neither the id key nor both the
type and url keys.
"""
db = self.db
origin_id = origin.get('id')
if origin_id: # check lookup per id first
ori = db.origin_get(origin_id, cur)
elif 'type' in origin and 'url' in origin: # or lookup per type, url
ori = db.origin_get_with(origin['type'], origin['url'], cur)
else: # unsupported lookup
raise ValueError('Origin must have either id or (type and url).')
if ori:
return dict(zip(self.origin_keys, ori))
return None
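# Illustrative usage sketch (editorial note, not part of the patch),
# showing both lookup modes, with the url from the test fixtures below:
#
#   origin = storage.origin_get({'type': 'git',
#                                'url': 'file:///dev/null'})
#   same = storage.origin_get({'id': origin['id']})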
@db_transaction_generator
def origin_search(self, url_pattern, offset=0, limit=50,
regexp=False, cur=None):
"""Search for origins whose urls contain a provided string pattern
or match a provided regular expression.
The search is performed in a case-insensitive way.
Args:
url_pattern: the string pattern to search for in origin urls
offset: number of found origins to skip before returning results
limit: the maximum number of found origins to return
regexp: if True, consider the provided pattern as a regular
expression and return origins whose urls match it
Yields:
dicts containing origin information, as returned
by :meth:`swh.storage.storage.Storage.origin_get`.
"""
db = self.db
for origin in db.origin_search(url_pattern, offset, limit,
regexp, cur):
yield dict(zip(self.origin_keys, origin))
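# Illustrative usage sketch (editorial note, not part of the patch):
#
#   for origin in storage.origin_search('/dev/', limit=10):
#       print(origin['id'], origin['url'])
#   # or, with a regular expression:
#   for origin in storage.origin_search('.*/.*', regexp=True):
#       print(origin['url'])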
@db_transaction
def _person_add(self, person, cur=None):
"""Add a person in storage.
Note: Internal function for now, do not use outside of this module.
It does nothing special when the person already exists;
adapt the code if more checks are needed.
Args:
person: dictionary with keys name and email.
Returns:
Id of the new person.
"""
db = self.db
return db.person_add(person)
@db_transaction_generator
def person_get(self, person, cur=None):
"""Return the persons identified by their ids.
Args:
person: array of ids.
Yields:
the persons corresponding to the given ids, as dicts.
"""
db = self.db
for person_row in db.person_get(person):
yield dict(zip(db.person_get_cols, person_row))
@db_transaction
def origin_add(self, origins, cur=None):
"""Add origins to the storage
Args:
origins: list of dictionaries representing the individual origins,
with the following keys:
- type: the origin type ('git', 'svn', 'deb', ...)
- url (bytes): the url the origin points to
Returns:
list: ids corresponding to the given origins
"""
ret = []
for origin in origins:
ret.append(self.origin_add_one(origin, cur=cur))
return ret
@db_transaction
def origin_add_one(self, origin, cur=None):
"""Add origin to the storage
Args:
origin: dictionary representing the individual origin to add. This
dict has the following keys:
- type (FIXME: enum TBD): the origin type ('git', 'wget', ...)
- url (bytes): the url the origin points to
Returns:
the id of the added origin, or of the identical one that already
exists.
"""
db = self.db
data = db.origin_get_with(origin['type'], origin['url'], cur)
if data:
return data[0]
return db.origin_add(origin['type'], origin['url'], cur)
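# Illustrative usage sketch (editorial note, not part of the patch):
# adding the same origin twice returns the same id:
#
#   id1 = storage.origin_add_one({'type': 'git', 'url': 'file:///dev/null'})
#   id2 = storage.origin_add_one({'type': 'git', 'url': 'file:///dev/null'})
#   assert id1 == id2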
@db_transaction
def fetch_history_start(self, origin_id, cur=None):
"""Add an entry for origin origin_id in fetch_history. Returns the id
of the added fetch_history entry
"""
fetch_history = {
'origin': origin_id,
'date': datetime.datetime.now(tz=datetime.timezone.utc),
}
return self.db.create_fetch_history(fetch_history, cur)
@db_transaction
def fetch_history_end(self, fetch_history_id, data, cur=None):
"""Close the fetch_history entry with id `fetch_history_id`, replacing
its data with `data`.
"""
now = datetime.datetime.now(tz=datetime.timezone.utc)
fetch_history = self.db.get_fetch_history(fetch_history_id, cur)
if not fetch_history:
raise ValueError('No fetch_history with id %d' % fetch_history_id)
fetch_history['duration'] = now - fetch_history['date']
fetch_history.update(data)
self.db.update_fetch_history(fetch_history, cur)
@db_transaction
def fetch_history_get(self, fetch_history_id, cur=None):
"""Get the fetch_history entry with id `fetch_history_id`.
"""
return self.db.get_fetch_history(fetch_history_id, cur)
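# Illustrative usage sketch (editorial note, not part of the patch):
# a loader would typically bracket its work with these helpers:
#
#   fh_id = storage.fetch_history_start(origin_id)
#   # ... fetch the origin ...
#   storage.fetch_history_end(fh_id, {'status': True,
#                                     'result': {'foo': 'bar'}})
#   info = storage.fetch_history_get(fh_id)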
@db_transaction
def entity_add(self, entities, cur=None):
"""Add the given entitites to the database (in entity_history).
Args:
entities (iterable): iterable of dictionaries with the following
keys:
- uuid (uuid): id of the entity
- parent (uuid): id of the parent entity
- name (str): name of the entity
- type (str): type of entity (one of 'organization',
'group_of_entities', 'hosting', 'group_of_persons', 'person',
'project')
- description (str, optional): description of the entity
- homepage (str): url of the entity's homepage
- active (bool): whether the entity is active
- generated (bool): whether the entity was generated
- lister_metadata (dict): lister-specific entity metadata
- metadata (dict): other metadata for the entity
- validity (datetime.DateTime array): timestamps at which we
listed the entity.
"""
db = self.db
cols = list(db.entity_history_cols)
cols.remove('id')
db.mktemp_entity_history(cur)
db.copy_to(entities, 'tmp_entity_history', cols, cur)
db.entity_history_add_from_temp(cur)
@db_transaction_generator
def entity_get_from_lister_metadata(self, entities, cur=None):
"""Fetch entities from the database, matching with the lister and
associated metadata.
Args:
entities (iterable): dictionaries containing the lister metadata to
look for. Useful keys are 'lister', 'type', 'id', ...
Yields:
fetched entities with all their attributes. If no match was found,
the returned entity is None.
"""
db = self.db
db.mktemp_entity_lister(cur)
mapped_entities = []
for i, entity in enumerate(entities):
mapped_entity = {
'id': i,
'lister_metadata': entity,
}
mapped_entities.append(mapped_entity)
db.copy_to(mapped_entities, 'tmp_entity_lister',
['id', 'lister_metadata'], cur)
cur.execute('''select id, %s
from swh_entity_from_tmp_entity_lister()
order by id''' %
','.join(db.entity_cols))
for id, *entity_vals in cur:
fetched_entity = dict(zip(db.entity_cols, entity_vals))
if fetched_entity['uuid']:
yield fetched_entity
else:
yield {
'uuid': None,
'lister_metadata': mapped_entities[id]['lister_metadata'],
}
@db_transaction_generator
def entity_get(self, uuid, cur=None):
"""Returns the list of entity per its uuid identifier and also its
parent hierarchy.
Args:
uuid: entity's identifier
Returns:
List of entities starting with entity with uuid and the parent
hierarchy from such entity.
"""
db = self.db
for entity in db.entity_get(uuid, cur):
yield dict(zip(db.entity_cols, entity))
@db_transaction
def entity_get_one(self, uuid, cur=None):
"""Returns one entity using its uuid identifier.
Args:
uuid: entity's identifier
Returns:
the object corresponding to the given entity
"""
db = self.db
entity = db.entity_get_one(uuid, cur)
if entity:
return dict(zip(db.entity_cols, entity))
else:
return None
@db_transaction
def stat_counters(self, cur=None):
"""compute statistics about the number of tuples in various tables
Returns:
dict: a dictionary mapping textual labels (e.g., content) to
integer values (e.g., the number of tuples in table content)
"""
return {k: v for (k, v) in self.db.stat_counters()}
@db_transaction
def origin_metadata_add(self, origin_id, ts, provider, tool, metadata,
cur=None):
""" Add an origin_metadata for the origin at ts with provenance and
metadata.
Args:
origin_id (int): the origin's id for which the metadata is added
ts (datetime): timestamp of the found metadata
provider (int): id of the metadata provider (e.g. the provider
for 'hal')
tool (int): tool used to extract metadata
metadata (jsonb): the metadata retrieved at the time and location
Returns:
id (int): the origin_metadata unique id
"""
if isinstance(ts, str):
ts = dateutil.parser.parse(ts)
return self.db.origin_metadata_add(origin_id, ts, provider, tool,
metadata, cur)
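# Illustrative usage sketch (editorial note, not part of the patch);
# ts may be passed as an ISO 8601 string, which is parsed with dateutil:
#
#   om_id = storage.origin_metadata_add(
#       origin_id, '2015-01-01T23:00:00+00:00', provider_id, tool_id,
#       {'name': 'test_origin_metadata', 'version': '0.0.1'})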
@db_transaction_generator
def origin_metadata_get_by(self, origin_id, provider_type=None, cur=None):
"""Retrieve list of all origin_metadata entries for the origin_id
Args:
origin_id (int): the unique origin identifier
provider_type (str): (optional) type of provider
Yields:
dicts: the origin_metadata dictionaries, with the keys:
- id (int): origin_metadata's id
- origin_id (int): origin's id
- discovery_date (datetime): timestamp of discovery
- tool_id (int): metadata's extracting tool
- metadata (jsonb)
- provider_id (int): metadata's provider
- provider_name (str)
- provider_type (str)
- provider_url (str)
"""
db = self.db
for line in db.origin_metadata_get_by(origin_id, provider_type, cur):
yield dict(zip(db.origin_metadata_get_cols, line))
@db_transaction_generator
def tool_add(self, tools, cur=None):
"""Add new tools to the storage.
Args:
tools (iterable of :class:`dict`): Tool information to add to
storage. Each tool is a :class:`dict` with the following keys:
- name (:class:`str`): name of the tool
- version (:class:`str`): version of the tool
- configuration (:class:`dict`): configuration of the tool,
must be json-encodable
Returns:
`iterable` of :class:`dict`: All the tools inserted in storage
(including the internal ``id``). The order of the list is not
guaranteed to match the order of the initial list.
"""
db = self.db
db.mktemp_tool(cur)
db.copy_to(tools, 'tmp_tool',
['name', 'version', 'configuration'],
cur)
tools = db.tool_add_from_temp(cur)
for line in tools:
yield dict(zip(db.tool_cols, line))
@db_transaction
def tool_get(self, tool, cur=None):
"""Retrieve tool information.
Args:
tool (dict): Tool information we want to retrieve from storage.
The dict has the same keys as those used in :func:`tool_add`.
Returns:
dict: The full tool information if it exists (``id`` included),
None otherwise.
"""
db = self.db
tool_conf = tool['configuration']
if isinstance(tool_conf, dict):
tool_conf = json.dumps(tool_conf)
idx = db.tool_get(tool['name'],
tool['version'],
tool_conf)
if not idx:
return None
return dict(zip(self.db.tool_cols, idx))
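# Illustrative usage sketch (editorial note, not part of the patch),
# using the swh-deposit tool fixture from the tests below:
#
#   tool = storage.tool_get({'name': 'swh-deposit', 'version': '0.0.1',
#                            'configuration': {'sword_version': '2'}})
#   # => the full tool dict including its internal 'id', or None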
@db_transaction
def metadata_provider_add(self, provider_name, provider_type, provider_url,
metadata, cur=None):
db = self.db
return db.metadata_provider_add(provider_name, provider_type,
provider_url, metadata, cur)
@db_transaction
def metadata_provider_get(self, provider_id, cur=None):
db = self.db
result = db.metadata_provider_get(provider_id)
if not result:
return None
return dict(zip(self.db.metadata_provider_cols, result))
@db_transaction
def metadata_provider_get_by(self, provider, cur=None):
db = self.db
result = db.metadata_provider_get_by(provider['provider_name'],
provider['provider_url'])
if not result:
return None
return dict(zip(self.db.metadata_provider_cols, result))
diff --git a/swh/storage/tests/test_storage.py b/swh/storage/tests/test_storage.py
index 393f890..cb99e7f 100644
--- a/swh/storage/tests/test_storage.py
+++ b/swh/storage/tests/test_storage.py
@@ -1,2477 +1,2522 @@
# Copyright (C) 2015-2017 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information
import copy
import datetime
from operator import itemgetter
import psycopg2
import unittest
from uuid import UUID
from unittest.mock import patch
from nose.tools import istest
from nose.plugins.attrib import attr
from swh.model import from_disk, identifiers
from swh.model.hashutil import hash_to_bytes
from swh.core.tests.db_testing import DbTestFixture
from swh.storage.tests.storage_testing import StorageTestFixture
@attr('db')
class BaseTestStorage(StorageTestFixture, DbTestFixture):
def setUp(self):
super().setUp()
db = self.test_db[self.TEST_STORAGE_DB_NAME]
self.conn = db.conn
self.cursor = db.cursor
self.maxDiff = None
self.cont = {
'data': b'42\n',
'length': 3,
'sha1': hash_to_bytes(
'34973274ccef6ab4dfaaf86599792fa9c3fe4689'),
'sha1_git': hash_to_bytes(
'd81cc0710eb6cf9efd5b920a8453e1e07157b6cd'),
'sha256': hash_to_bytes(
'673650f936cb3b0a2f93ce09d81be107'
'48b1b203c19e8176b4eefc1964a0cf3a'),
'blake2s256': hash_to_bytes('d5fe1939576527e42cfd76a9455a2'
'432fe7f56669564577dd93c4280e76d661d'),
'status': 'visible',
}
self.cont2 = {
'data': b'4242\n',
'length': 5,
'sha1': hash_to_bytes(
'61c2b3a30496d329e21af70dd2d7e097046d07b7'),
'sha1_git': hash_to_bytes(
'36fade77193cb6d2bd826161a0979d64c28ab4fa'),
'sha256': hash_to_bytes(
'859f0b154fdb2d630f45e1ecae4a8629'
'15435e663248bb8461d914696fc047cd'),
'blake2s256': hash_to_bytes('849c20fad132b7c2d62c15de310adfe87be'
'94a379941bed295e8141c6219810d'),
'status': 'visible',
}
self.cont3 = {
'data': b'424242\n',
'length': 7,
'sha1': hash_to_bytes(
'3e21cc4942a4234c9e5edd8a9cacd1670fe59f13'),
'sha1_git': hash_to_bytes(
'c932c7649c6dfa4b82327d121215116909eb3bea'),
'sha256': hash_to_bytes(
'92fb72daf8c6818288a35137b72155f5'
'07e5de8d892712ab96277aaed8cf8a36'),
'blake2s256': hash_to_bytes('76d0346f44e5a27f6bafdd9c2befd304af'
'f83780f93121d801ab6a1d4769db11'),
'status': 'visible',
}
self.missing_cont = {
'data': b'missing\n',
'length': 8,
'sha1': hash_to_bytes(
'f9c24e2abb82063a3ba2c44efd2d3c797f28ac90'),
'sha1_git': hash_to_bytes(
'33e45d56f88993aae6a0198013efa80716fd8919'),
'sha256': hash_to_bytes(
'6bbd052ab054ef222c1c87be60cd191a'
'ddedd24cc882d1f5f7f7be61dc61bb3a'),
'blake2s256': hash_to_bytes('306856b8fd879edb7b6f1aeaaf8db9bbecc9'
'93cd7f776c333ac3a782fa5c6eba'),
'status': 'absent',
}
self.skipped_cont = {
'length': 1024 * 1024 * 200,
'sha1_git': hash_to_bytes(
'33e45d56f88993aae6a0198013efa80716fd8920'),
'sha1': hash_to_bytes(
'43e45d56f88993aae6a0198013efa80716fd8920'),
'sha256': hash_to_bytes(
'7bbd052ab054ef222c1c87be60cd191a'
'ddedd24cc882d1f5f7f7be61dc61bb3a'),
'blake2s256': hash_to_bytes(
'ade18b1adecb33f891ca36664da676e1'
'2c772cc193778aac9a137b8dc5834b9b'),
'reason': 'Content too long',
'status': 'absent',
}
self.skipped_cont2 = {
'length': 1024 * 1024 * 300,
'sha1_git': hash_to_bytes(
'44e45d56f88993aae6a0198013efa80716fd8921'),
'sha1': hash_to_bytes(
'54e45d56f88993aae6a0198013efa80716fd8920'),
'sha256': hash_to_bytes(
'8cbd052ab054ef222c1c87be60cd191a'
'ddedd24cc882d1f5f7f7be61dc61bb3a'),
'blake2s256': hash_to_bytes(
'9ce18b1adecb33f891ca36664da676e1'
'2c772cc193778aac9a137b8dc5834b9b'),
'reason': 'Content too long',
'status': 'absent',
}
self.dir = {
'id': b'4\x013\x422\x531\x000\xf51\xe62\xa73\xff7\xc3\xa90',
'entries': [
{
'name': b'foo',
'type': 'file',
'target': self.cont['sha1_git'],
'perms': from_disk.DentryPerms.content,
},
{
'name': b'bar\xc3',
'type': 'dir',
'target': b'12345678901234567890',
'perms': from_disk.DentryPerms.directory,
},
],
}
self.dir2 = {
'id': b'4\x013\x422\x531\x000\xf51\xe62\xa73\xff7\xc3\xa95',
'entries': [
{
'name': b'oof',
'type': 'file',
'target': self.cont2['sha1_git'],
'perms': from_disk.DentryPerms.content,
}
],
}
self.dir3 = {
'id': hash_to_bytes('33e45d56f88993aae6a0198013efa80716fd8921'),
'entries': [
{
'name': b'foo',
'type': 'file',
'target': self.cont['sha1_git'],
'perms': from_disk.DentryPerms.content,
},
{
'name': b'bar',
'type': 'dir',
'target': b'12345678901234560000',
'perms': from_disk.DentryPerms.directory,
},
{
'name': b'hello',
'type': 'file',
'target': b'12345678901234567890',
'perms': from_disk.DentryPerms.content,
},
],
}
self.minus_offset = datetime.timezone(datetime.timedelta(minutes=-120))
self.plus_offset = datetime.timezone(datetime.timedelta(minutes=120))
self.revision = {
'id': b'56789012345678901234',
'message': b'hello',
'author': {
'name': b'Nicolas Dandrimont',
'email': b'nicolas@example.com',
'fullname': b'Nicolas Dandrimont <nicolas@example.com> ',
},
'date': {
'timestamp': 1234567890,
'offset': 120,
'negative_utc': None,
},
'committer': {
'name': b'St\xc3fano Zacchiroli',
'email': b'stefano@example.com',
'fullname': b'St\xc3fano Zacchiroli <stefano@example.com>'
},
'committer_date': {
'timestamp': 1123456789,
'offset': 0,
'negative_utc': True,
},
'parents': [b'01234567890123456789', b'23434512345123456789'],
'type': 'git',
'directory': self.dir['id'],
'metadata': {
'checksums': {
'sha1': 'tarball-sha1',
'sha256': 'tarball-sha256',
},
'signed-off-by': 'some-dude',
'extra_headers': [
['gpgsig', b'test123'],
['mergetags', [b'foo\\bar', b'\x22\xaf\x89\x80\x01\x00']],
],
},
'synthetic': True
}
self.revision2 = {
'id': b'87659012345678904321',
'message': b'hello again',
'author': {
'name': b'Roberto Dicosmo',
'email': b'roberto@example.com',
'fullname': b'Roberto Dicosmo <roberto@example.com>',
},
'date': {
'timestamp': {
'seconds': 1234567843,
'microseconds': 220000,
},
'offset': -720,
'negative_utc': None,
},
'committer': {
'name': b'tony',
'email': b'ar@dumont.fr',
'fullname': b'tony <ar@dumont.fr>',
},
'committer_date': {
'timestamp': 1123456789,
'offset': 0,
'negative_utc': False,
},
'parents': [b'01234567890123456789'],
'type': 'git',
'directory': self.dir2['id'],
'metadata': None,
'synthetic': False
}
self.revision3 = {
'id': hash_to_bytes('7026b7c1a2af56521e951c01ed20f255fa054238'),
'message': b'a simple revision with no parents this time',
'author': {
'name': b'Roberto Dicosmo',
'email': b'roberto@example.com',
'fullname': b'Roberto Dicosmo <roberto@example.com>',
},
'date': {
'timestamp': {
'seconds': 1234567843,
'microseconds': 220000,
},
'offset': -720,
'negative_utc': None,
},
'committer': {
'name': b'tony',
'email': b'ar@dumont.fr',
'fullname': b'tony <ar@dumont.fr>',
},
'committer_date': {
'timestamp': 1127351742,
'offset': 0,
'negative_utc': False,
},
'parents': [],
'type': 'git',
'directory': self.dir2['id'],
'metadata': None,
'synthetic': True
}
self.revision4 = {
'id': hash_to_bytes('368a48fe15b7db2383775f97c6b247011b3f14f4'),
'message': b'parent of self.revision2',
'author': {
'name': b'me',
'email': b'me@soft.heri',
'fullname': b'me <me@soft.heri>',
},
'date': {
'timestamp': {
'seconds': 1244567843,
'microseconds': 220000,
},
'offset': -720,
'negative_utc': None,
},
'committer': {
'name': b'committer-dude',
'email': b'committer@dude.com',
'fullname': b'committer-dude <committer@dude.com>',
},
'committer_date': {
'timestamp': {
'seconds': 1244567843,
'microseconds': 220000,
},
'offset': -720,
'negative_utc': None,
},
'parents': [self.revision3['id']],
'type': 'git',
'directory': self.dir['id'],
'metadata': None,
'synthetic': False
}
self.origin = {
'url': 'file:///dev/null',
'type': 'git',
}
self.origin2 = {
'url': 'file:///dev/zero',
'type': 'git',
}
self.provider = {
'name': 'hal',
'type': 'deposit-client',
'url': 'http:///hal/inria',
'metadata': {
'location': 'France'
}
}
self.metadata_tool = {
'name': 'swh-deposit',
'version': '0.0.1',
'configuration': {
'sword_version': '2'
}
}
self.origin_metadata = {
'origin': self.origin,
'discovery_date': datetime.datetime(2015, 1, 1, 23, 0, 0,
tzinfo=datetime.timezone.utc),
'provider': self.provider,
'tool': 'swh-deposit',
'metadata': {
'name': 'test_origin_metadata',
'version': '0.0.1'
}
}
self.origin_metadata2 = {
'origin': self.origin,
'discovery_date': datetime.datetime(2017, 1, 1, 23, 0, 0,
tzinfo=datetime.timezone.utc),
'provider': self.provider,
'tool': 'swh-deposit',
'metadata': {
'name': 'test_origin_metadata',
'version': '0.0.1'
}
}
self.date_visit1 = datetime.datetime(2015, 1, 1, 23, 0, 0,
tzinfo=datetime.timezone.utc)
self.occurrence = {
'branch': b'master',
'target': self.revision['id'],
'target_type': 'revision',
}
self.date_visit2 = datetime.datetime(2015, 1, 1, 23, 0, 0,
tzinfo=datetime.timezone.utc)
self.occurrence2 = {
'branch': b'master',
'target': self.revision2['id'],
'target_type': 'revision',
}
self.date_visit3 = datetime.datetime(2015, 1, 1, 23, 0, 0,
tzinfo=datetime.timezone.utc)
# template occurrence to be filled in test (cf. revision_log_by)
self.occurrence3 = {
'branch': b'master',
'target_type': 'revision',
}
self.release = {
'id': b'87659012345678901234',
'name': b'v0.0.1',
'author': {
'name': b'olasd',
'email': b'nic@olasd.fr',
'fullname': b'olasd <nic@olasd.fr>',
},
'date': {
'timestamp': 1234567890,
'offset': 42,
'negative_utc': None,
},
'target': b'43210987654321098765',
'target_type': 'revision',
'message': b'synthetic release',
'synthetic': True,
}
self.release2 = {
'id': b'56789012348765901234',
'name': b'v0.0.2',
'author': {
'name': b'tony',
'email': b'ar@dumont.fr',
'fullname': b'tony <ar@dumont.fr>',
},
'date': {
'timestamp': 1634366813,
'offset': -120,
'negative_utc': None,
},
'target': b'432109\xa9765432\xc309\x00765',
'target_type': 'revision',
'message': b'v0.0.2\nMisc performance improvments + bug fixes',
'synthetic': False
}
self.release3 = {
'id': b'87659012345678904321',
'name': b'v0.0.2',
'author': {
'name': b'tony',
'email': b'tony@ardumont.fr',
'fullname': b'tony <tony@ardumont.fr>',
},
'date': {
'timestamp': 1634336813,
'offset': 0,
'negative_utc': False,
},
'target': self.revision2['id'],
'target_type': 'revision',
'message': b'yet another synthetic release',
'synthetic': True,
}
self.fetch_history_date = datetime.datetime(
2015, 1, 2, 21, 0, 0,
tzinfo=datetime.timezone.utc)
self.fetch_history_end = datetime.datetime(
2015, 1, 2, 23, 0, 0,
tzinfo=datetime.timezone.utc)
self.fetch_history_duration = (self.fetch_history_end -
self.fetch_history_date)
self.fetch_history_data = {
'status': True,
'result': {'foo': 'bar'},
'stdout': 'blabla',
'stderr': 'blablabla',
}
self.entity1 = {
'uuid': UUID('f96a7ec1-0058-4920-90cc-7327e4b5a4bf'),
# GitHub users
'parent': UUID('ad6df473-c1d2-4f40-bc58-2b091d4a750e'),
'name': 'github:user:olasd',
'type': 'person',
'description': 'Nicolas Dandrimont',
'homepage': 'http://example.com',
'active': True,
'generated': True,
'lister_metadata': {
# swh.lister.github
'lister': '34bd6b1b-463f-43e5-a697-785107f598e4',
'id': 12877,
'type': 'user',
'last_activity': '2015-11-03',
},
'metadata': None,
'validity': [
datetime.datetime(2015, 11, 3, 11, 0, 0,
tzinfo=datetime.timezone.utc),
]
}
self.entity1_query = {
'lister': '34bd6b1b-463f-43e5-a697-785107f598e4',
'id': 12877,
'type': 'user',
}
self.entity2 = {
'uuid': UUID('3903d075-32d6-46d4-9e29-0aef3612c4eb'),
# GitHub users
'parent': UUID('ad6df473-c1d2-4f40-bc58-2b091d4a750e'),
'name': 'github:user:zacchiro',
'type': 'person',
'description': 'Stefano Zacchiroli',
'homepage': 'http://example.com',
'active': True,
'generated': True,
'lister_metadata': {
# swh.lister.github
'lister': '34bd6b1b-463f-43e5-a697-785107f598e4',
'id': 216766,
'type': 'user',
'last_activity': '2015-11-03',
},
'metadata': None,
'validity': [
datetime.datetime(2015, 11, 3, 11, 0, 0,
tzinfo=datetime.timezone.utc),
]
}
self.entity3 = {
'uuid': UUID('111df473-c1d2-4f40-bc58-2b091d4a7111'),
# GitHub users
'parent': UUID('222df473-c1d2-4f40-bc58-2b091d4a7222'),
'name': 'github:user:ardumont',
'type': 'person',
'description': 'Antoine R. Dumont a.k.a tony',
'homepage': 'https://ardumont.github.io',
'active': True,
'generated': True,
'lister_metadata': {
'lister': '34bd6b1b-463f-43e5-a697-785107f598e4',
'id': 666,
'type': 'user',
'last_activity': '2016-01-15',
},
'metadata': None,
'validity': [
datetime.datetime(2015, 11, 3, 11, 0, 0,
tzinfo=datetime.timezone.utc),
]
}
self.entity4 = {
'uuid': UUID('222df473-c1d2-4f40-bc58-2b091d4a7222'),
# GitHub users
'parent': None,
'name': 'github:user:ToNyX',
'type': 'person',
'description': 'ToNyX',
'homepage': 'https://ToNyX.github.io',
'active': True,
'generated': True,
'lister_metadata': {
'lister': '34bd6b1b-463f-43e5-a697-785107f598e4',
'id': 999,
'type': 'user',
'last_activity': '2015-12-24',
},
'metadata': None,
'validity': [
datetime.datetime(2015, 11, 3, 11, 0, 0,
tzinfo=datetime.timezone.utc),
]
}
self.entity2_query = {
'lister_metadata': {
'lister': '34bd6b1b-463f-43e5-a697-785107f598e4',
'id': 216766,
'type': 'user',
},
}
self.snapshot = {
'id': hash_to_bytes('2498dbf535f882bc7f9a18fb16c9ad27fda7bab7'),
'branches': {
self.occurrence['branch']: {
'target': self.occurrence['target'],
'target_type': self.occurrence['target_type'],
},
},
}
self.empty_snapshot = {
'id': hash_to_bytes('1a8893e6a86f444e8be8e7bda6cb34fb1735a00e'),
'branches': {},
}
self.complete_snapshot = {
'id': hash_to_bytes('6e65b86363953b780d92b0a928f3e8fcdd10db36'),
'branches': {
b'directory': {
'target': hash_to_bytes(
'1bd0e65f7d2ff14ae994de17a1e7fe65111dcad8'),
'target_type': 'directory',
},
b'content': {
'target': hash_to_bytes(
'fe95a46679d128ff167b7c55df5d02356c5a1ae1'),
'target_type': 'content',
},
b'alias': {
'target': b'revision',
'target_type': 'alias',
},
b'revision': {
'target': hash_to_bytes(
'aafb16d69fd30ff58afdd69036a26047f3aebdc6'),
'target_type': 'revision',
},
b'release': {
'target': hash_to_bytes(
'7045404f3d1c54e6473c71bbb716529fbad4be24'),
'target_type': 'release',
},
b'snapshot': {
'target': hash_to_bytes(
'1a8893e6a86f444e8be8e7bda6cb34fb1735a00e'),
'target_type': 'snapshot',
},
b'dangling': None,
}
}
def tearDown(self):
self.reset_storage_tables()
super().tearDown()
class CommonTestStorage(BaseTestStorage):
"""Base class for Storage testing.
This class is used as-is to test local storage (see TestLocalStorage
below) and remote storage (see TestRemoteStorage in
test_remote_storage.py).
We need to have the two classes inherit from this base class
separately to avoid nosetests running the tests from the base
class twice.
"""
@staticmethod
def normalize_entity(entity):
entity = copy.deepcopy(entity)
for key in ('date', 'committer_date'):
if key in entity:
entity[key] = identifiers.normalize_timestamp(entity[key])
return entity
@istest
def check_config(self):
self.assertTrue(self.storage.check_config(check_write=True))
self.assertTrue(self.storage.check_config(check_write=False))
@istest
def content_add(self):
cont = self.cont
self.storage.content_add([cont])
if hasattr(self.storage, 'objstorage'):
self.assertIn(cont['sha1'], self.storage.objstorage)
self.cursor.execute('SELECT sha1, sha1_git, sha256, length, status'
' FROM content WHERE sha1 = %s',
(cont['sha1'],))
datum = self.cursor.fetchone()
self.assertEqual(
(datum[0].tobytes(), datum[1].tobytes(), datum[2].tobytes(),
datum[3], datum[4]),
(cont['sha1'], cont['sha1_git'], cont['sha256'],
cont['length'], 'visible'))
@istest
def content_add_collision(self):
cont1 = self.cont
# create (corrupted) content with same sha1{,_git} but != sha256
cont1b = cont1.copy()
sha256_array = bytearray(cont1b['sha256'])
sha256_array[0] += 1
cont1b['sha256'] = bytes(sha256_array)
with self.assertRaises(psycopg2.IntegrityError):
self.storage.content_add([cont1, cont1b])
@istest
def skipped_content_add(self):
cont = self.skipped_cont.copy()
cont2 = self.skipped_cont2.copy()
cont2['blake2s256'] = None
self.storage.content_add([cont, cont, cont2])
self.cursor.execute('SELECT sha1, sha1_git, sha256, blake2s256, '
'length, status, reason '
'FROM skipped_content ORDER BY sha1_git')
datums = self.cursor.fetchall()
self.assertEquals(2, len(datums))
datum = datums[0]
self.assertEqual(
(datum[0].tobytes(), datum[1].tobytes(), datum[2].tobytes(),
datum[3].tobytes(), datum[4], datum[5], datum[6]),
(cont['sha1'], cont['sha1_git'], cont['sha256'],
cont['blake2s256'], cont['length'], 'absent',
'Content too long')
)
datum2 = datums[1]
self.assertEqual(
(datum2[0].tobytes(), datum2[1].tobytes(), datum2[2].tobytes(),
datum2[3], datum2[4], datum2[5], datum2[6]),
(cont2['sha1'], cont2['sha1_git'], cont2['sha256'],
cont2['blake2s256'], cont2['length'], 'absent',
'Content too long')
)
@istest
def content_missing(self):
cont2 = self.cont2
missing_cont = self.missing_cont
self.storage.content_add([cont2])
gen = self.storage.content_missing([cont2, missing_cont])
self.assertEqual(list(gen), [missing_cont['sha1']])
@istest
def content_missing_per_sha1(self):
# given
cont2 = self.cont2
missing_cont = self.missing_cont
self.storage.content_add([cont2])
# when
gen = self.storage.content_missing_per_sha1([cont2['sha1'],
missing_cont['sha1']])
# then
self.assertEqual(list(gen), [missing_cont['sha1']])
@istest
def content_get_metadata(self):
cont1 = self.cont.copy()
cont2 = self.cont2.copy()
self.storage.content_add([cont1, cont2])
gen = self.storage.content_get_metadata([cont1['sha1'], cont2['sha1']])
# we only retrieve the metadata
cont1.pop('data')
cont2.pop('data')
self.assertEqual(list(gen), [cont1, cont2])
@istest
def content_get_metadata_missing_sha1(self):
cont1 = self.cont.copy()
cont2 = self.cont2.copy()
missing_cont = self.missing_cont.copy()
self.storage.content_add([cont1, cont2])
gen = self.storage.content_get_metadata([missing_cont['sha1']])
# All the metadata keys are None
missing_cont.pop('data')
for key in list(missing_cont):
if key != 'sha1':
missing_cont[key] = None
self.assertEqual(list(gen), [missing_cont])
@istest
def directory_get(self):
# given
init_missing = list(self.storage.directory_missing([self.dir['id']]))
self.assertEqual([self.dir['id']], init_missing)
self.storage.directory_add([self.dir])
# when
actual_dirs = list(self.storage.directory_get([self.dir['id']]))
self.assertEqual(len(actual_dirs), 1)
dir0 = actual_dirs[0]
self.assertEqual(dir0['id'], self.dir['id'])
# ids are generated, so values are non-deterministic
self.assertEqual(len(dir0['file_entries']), 1)
self.assertEqual(len(dir0['dir_entries']), 1)
self.assertIsNone(dir0['rev_entries'])
after_missing = list(self.storage.directory_missing([self.dir['id']]))
self.assertEqual([], after_missing)
@istest
def directory_add(self):
init_missing = list(self.storage.directory_missing([self.dir['id']]))
self.assertEqual([self.dir['id']], init_missing)
self.storage.directory_add([self.dir])
stored_data = list(self.storage.directory_ls(self.dir['id']))
data_to_store = []
for ent in sorted(self.dir['entries'], key=itemgetter('name')):
data_to_store.append({
'dir_id': self.dir['id'],
'type': ent['type'],
'target': ent['target'],
'name': ent['name'],
'perms': ent['perms'],
'status': None,
'sha1': None,
'sha1_git': None,
'sha256': None,
'length': None,
})
self.assertEqual(data_to_store, stored_data)
after_missing = list(self.storage.directory_missing([self.dir['id']]))
self.assertEqual([], after_missing)
@istest
def directory_entry_get_by_path(self):
# given
init_missing = list(self.storage.directory_missing([self.dir3['id']]))
self.assertEqual([self.dir3['id']], init_missing)
self.storage.directory_add([self.dir3])
expected_entries = [
{
'dir_id': self.dir3['id'],
'name': b'foo',
'type': 'file',
'target': self.cont['sha1_git'],
'sha1': None,
'sha1_git': None,
'sha256': None,
'status': None,
'perms': from_disk.DentryPerms.content,
'length': None,
},
{
'dir_id': self.dir3['id'],
'name': b'bar',
'type': 'dir',
'target': b'12345678901234560000',
'sha1': None,
'sha1_git': None,
'sha256': None,
'status': None,
'perms': from_disk.DentryPerms.directory,
'length': None,
},
{
'dir_id': self.dir3['id'],
'name': b'hello',
'type': 'file',
'target': b'12345678901234567890',
'sha1': None,
'sha1_git': None,
'sha256': None,
'status': None,
'perms': from_disk.DentryPerms.content,
'length': None,
},
]
# when (all must be found here)
for entry, expected_entry in zip(self.dir3['entries'],
expected_entries):
actual_entry = self.storage.directory_entry_get_by_path(
self.dir3['id'],
[entry['name']])
self.assertEqual(actual_entry, expected_entry)
# when (nothing should be found here since self.dir is not persisted.)
for entry in self.dir['entries']:
actual_entry = self.storage.directory_entry_get_by_path(
self.dir['id'],
[entry['name']])
self.assertIsNone(actual_entry)
@istest
def revision_add(self):
init_missing = self.storage.revision_missing([self.revision['id']])
self.assertEqual([self.revision['id']], list(init_missing))
self.storage.revision_add([self.revision])
end_missing = self.storage.revision_missing([self.revision['id']])
self.assertEqual([], list(end_missing))
@istest
def revision_log(self):
# given
# self.revision4 -is-child-of-> self.revision3
self.storage.revision_add([self.revision3,
self.revision4])
# when
actual_results = list(self.storage.revision_log(
[self.revision4['id']]))
# hack: ids generated
for actual_result in actual_results:
del actual_result['author']['id']
del actual_result['committer']['id']
self.assertEqual(len(actual_results), 2) # rev4 -child-> rev3
self.assertEquals(actual_results[0],
self.normalize_entity(self.revision4))
self.assertEquals(actual_results[1],
self.normalize_entity(self.revision3))
@istest
def revision_log_with_limit(self):
# given
# self.revision4 -is-child-of-> self.revision3
self.storage.revision_add([self.revision3,
self.revision4])
actual_results = list(self.storage.revision_log(
[self.revision4['id']], 1))
# hack: ids generated
for actual_result in actual_results:
del actual_result['author']['id']
del actual_result['committer']['id']
self.assertEqual(len(actual_results), 1)
self.assertEquals(actual_results[0], self.revision4)
@istest
def revision_log_by(self):
# given
origin_id = self.storage.origin_add_one(self.origin2)
self.storage.revision_add([self.revision3,
self.revision4])
# occurrence3 targets 'revision4'
# with branch 'master' and origin origin_id
occurrence3 = self.occurrence3.copy()
date_visit1 = self.date_visit3
origin_visit1 = self.storage.origin_visit_add(origin_id,
date_visit1)
occurrence3.update({
'origin': origin_id,
'target': self.revision4['id'],
'visit': origin_visit1['visit'],
})
self.storage.occurrence_add([occurrence3])
# self.revision4 -is-child-of-> self.revision3
# when
actual_results = list(self.storage.revision_log_by(
origin_id,
branch_name=occurrence3['branch'],
timestamp=date_visit1))
# hack: ids generated
for actual_result in actual_results:
del actual_result['author']['id']
del actual_result['committer']['id']
self.assertEqual(len(actual_results), 2)
self.assertEquals(actual_results[0],
self.normalize_entity(self.revision4))
self.assertEquals(actual_results[1],
self.normalize_entity(self.revision3))
# when - 2
actual_results = list(self.storage.revision_log_by(
origin_id,
branch_name=None,
timestamp=None,
limit=1))
# then
for actual_result in actual_results:
del actual_result['author']['id']
del actual_result['committer']['id']
self.assertEqual(len(actual_results), 1)
self.assertEquals(actual_results[0], self.revision4)
# when - 3 (revision not found)
actual_res = list(self.storage.revision_log_by(
origin_id,
branch_name='inexistant-branch',
timestamp=None))
self.assertEquals(actual_res, [])
@staticmethod
def _short_revision(revision):
return [revision['id'], revision['parents']]
@istest
def revision_shortlog(self):
# given
# self.revision4 -is-child-of-> self.revision3
self.storage.revision_add([self.revision3,
self.revision4])
# when
actual_results = list(self.storage.revision_shortlog(
[self.revision4['id']]))
self.assertEqual(len(actual_results), 2) # rev4 -child-> rev3
self.assertEquals(list(actual_results[0]),
self._short_revision(self.revision4))
self.assertEquals(list(actual_results[1]),
self._short_revision(self.revision3))
@istest
def revision_shortlog_with_limit(self):
# given
# self.revision4 -is-child-of-> self.revision3
self.storage.revision_add([self.revision3,
self.revision4])
actual_results = list(self.storage.revision_shortlog(
[self.revision4['id']], 1))
self.assertEqual(len(actual_results), 1)
self.assertEquals(list(actual_results[0]),
self._short_revision(self.revision4))
@istest
def revision_get(self):
self.storage.revision_add([self.revision])
actual_revisions = list(self.storage.revision_get(
[self.revision['id'], self.revision2['id']]))
# when
del actual_revisions[0]['author']['id'] # hack: ids are generated
del actual_revisions[0]['committer']['id']
self.assertEqual(len(actual_revisions), 2)
self.assertEqual(actual_revisions[0],
self.normalize_entity(self.revision))
self.assertIsNone(actual_revisions[1])
@istest
def revision_get_no_parents(self):
self.storage.revision_add([self.revision3])
get = list(self.storage.revision_get([self.revision3['id']]))
self.assertEqual(len(get), 1)
self.assertEqual(get[0]['parents'], []) # no parents on this one
@istest
def revision_get_by(self):
# given
self.storage.content_add([self.cont2])
self.storage.directory_add([self.dir2]) # point to self.cont
self.storage.revision_add([self.revision2]) # points to self.dir
origin_id = self.storage.origin_add_one(self.origin2)
# occurrence2 points to 'revision2' with branch 'master', we
# need to point to the right origin
occurrence2 = self.occurrence2.copy()
date_visit1 = self.date_visit2
origin_visit1 = self.storage.origin_visit_add(origin_id, date_visit1)
occurrence2.update({
'origin': origin_id,
'visit': origin_visit1['visit'],
})
self.storage.occurrence_add([occurrence2])
# we want only revision 2
expected_revisions = list(self.storage.revision_get(
[self.revision2['id']]))
# when
actual_results = list(self.storage.revision_get_by(
origin_id,
occurrence2['branch'],
None))
self.assertEqual(actual_results[0], expected_revisions[0])
# when (with no branch filtering, it's still ok)
actual_results = list(self.storage.revision_get_by(
origin_id,
None,
None))
self.assertEqual(actual_results[0], expected_revisions[0])
@istest
def revision_get_by_multiple_occurrence(self):
# 2 occurrences pointing to 2 different revisions
# each occurrence has a 1 day delta
# the API must return the revision whose occurrence is the nearest.
# given
self.storage.content_add([self.cont2])
self.storage.directory_add([self.dir2])
self.storage.revision_add([self.revision2, self.revision3])
origin_id = self.storage.origin_add_one(self.origin2)
# occurrence2 points to 'revision2' with branch 'master', we
# need to point to the right origin
date_visit1 = self.date_visit2
origin_visit1 = self.storage.origin_visit_add(origin_id, date_visit1)
occurrence2 = self.occurrence2.copy()
occurrence2.update({
'origin': origin_id,
'visit': origin_visit1['visit']
})
dt = datetime.timedelta(days=1)
date_visit2 = date_visit1 + dt
origin_visit2 = self.storage.origin_visit_add(origin_id, date_visit2)
occurrence3 = self.occurrence2.copy()
occurrence3.update({
'origin': origin_id,
'visit': origin_visit2['visit'],
'target': self.revision3['id'],
})
# 2 occurrences on same revision with lower validity date with 1 day
# delta
self.storage.occurrence_add([occurrence2])
self.storage.occurrence_add([occurrence3])
# when
actual_results0 = list(self.storage.revision_get_by(
origin_id,
occurrence2['branch'],
date_visit1))
# hack: ids are generated
del actual_results0[0]['author']['id']
del actual_results0[0]['committer']['id']
self.assertEquals(len(actual_results0), 1)
self.assertEqual(actual_results0,
[self.normalize_entity(self.revision2)])
# when
actual_results1 = list(self.storage.revision_get_by(
origin_id,
occurrence2['branch'],
date_visit1 + dt/3)) # closer to first visit
# hack: ids are generated
del actual_results1[0]['author']['id']
del actual_results1[0]['committer']['id']
self.assertEquals(len(actual_results1), 1)
self.assertEqual(actual_results1,
[self.normalize_entity(self.revision2)])
# when
actual_results2 = list(self.storage.revision_get_by(
origin_id,
occurrence2['branch'],
date_visit1 + 2*dt/3)) # closer to second visit
del actual_results2[0]['author']['id']
del actual_results2[0]['committer']['id']
self.assertEquals(len(actual_results2), 1)
self.assertEqual(actual_results2,
[self.normalize_entity(self.revision3)])
# when
actual_results3 = list(self.storage.revision_get_by(
origin_id,
occurrence3['branch'],
date_visit2))
# hack: ids are generated
del actual_results3[0]['author']['id']
del actual_results3[0]['committer']['id']
self.assertEquals(len(actual_results3), 1)
self.assertEqual(actual_results3,
[self.normalize_entity(self.revision3)])
# when
actual_results4 = list(self.storage.revision_get_by(
origin_id,
None,
None))
for actual_result in actual_results4:
del actual_result['author']['id']
del actual_result['committer']['id']
self.assertEquals(len(actual_results4), 1)
self.assertCountEqual(actual_results4,
[self.normalize_entity(self.revision3)])
@istest
def release_add(self):
init_missing = self.storage.release_missing([self.release['id'],
self.release2['id']])
self.assertEqual([self.release['id'], self.release2['id']],
list(init_missing))
self.storage.release_add([self.release, self.release2])
end_missing = self.storage.release_missing([self.release['id'],
self.release2['id']])
self.assertEqual([], list(end_missing))
@istest
def release_get(self):
# given
self.storage.release_add([self.release, self.release2])
# when
actual_releases = list(self.storage.release_get([self.release['id'],
self.release2['id']]))
# then
for actual_release in actual_releases:
del actual_release['author']['id'] # hack: ids are generated
self.assertEquals([self.normalize_entity(self.release),
self.normalize_entity(self.release2)],
[actual_releases[0], actual_releases[1]])
@istest
def release_get_by(self):
# given
self.storage.revision_add([self.revision2]) # points to self.dir
self.storage.release_add([self.release3])
origin_id = self.storage.origin_add_one(self.origin2)
# occurrence2 points to 'revision2' with branch 'master', we
# need to point to the right origin
origin_visit = self.storage.origin_visit_add(origin_id,
self.date_visit2)
occurrence2 = self.occurrence2.copy()
occurrence2.update({
'origin': origin_id,
'visit': origin_visit['visit'],
})
self.storage.occurrence_add([occurrence2])
# we want only revision 2
expected_releases = list(self.storage.release_get(
[self.release3['id']]))
# when
actual_results = list(self.storage.release_get_by(
occurrence2['origin']))
# then
self.assertEqual(actual_results[0], expected_releases[0])
@istest
def origin_add_one(self):
origin0 = self.storage.origin_get(self.origin)
self.assertIsNone(origin0)
id = self.storage.origin_add_one(self.origin)
actual_origin = self.storage.origin_get({'url': self.origin['url'],
'type': self.origin['type']})
self.assertEqual(actual_origin['id'], id)
id2 = self.storage.origin_add_one(self.origin)
self.assertEqual(id, id2)
@istest
def origin_add(self):
origin0 = self.storage.origin_get(self.origin)
self.assertIsNone(origin0)
id1, id2 = self.storage.origin_add([self.origin, self.origin2])
actual_origin = self.storage.origin_get({
'url': self.origin['url'],
'type': self.origin['type'],
})
self.assertEqual(actual_origin['id'], id1)
actual_origin2 = self.storage.origin_get({
'url': self.origin2['url'],
'type': self.origin2['type'],
})
self.assertEqual(actual_origin2['id'], id2)
@istest
def origin_add_twice(self):
add1 = self.storage.origin_add([self.origin, self.origin2])
add2 = self.storage.origin_add([self.origin, self.origin2])
self.assertEqual(add1, add2)
@istest
def origin_get(self):
self.assertIsNone(self.storage.origin_get(self.origin))
id = self.storage.origin_add_one(self.origin)
# lookup per type and url (returns id)
actual_origin0 = self.storage.origin_get({'url': self.origin['url'],
'type': self.origin['type']})
self.assertEqual(actual_origin0['id'], id)
# lookup per id (returns dict)
actual_origin1 = self.storage.origin_get({'id': id})
self.assertEqual(actual_origin1, {'id': id,
'type': self.origin['type'],
'url': self.origin['url'],
'lister': None,
'project': None})
@istest
def origin_search(self):
found_origins = list(self.storage.origin_search(self.origin['url']))
self.assertEqual(len(found_origins), 0)
found_origins = list(self.storage.origin_search(self.origin['url'],
regexp=True))
self.assertEqual(len(found_origins), 0)
id = self.storage.origin_add_one(self.origin)
origin_data = {'id': id,
'type': self.origin['type'],
'url': self.origin['url'],
'lister': None,
'project': None}
found_origins = list(self.storage.origin_search(self.origin['url']))
self.assertEqual(len(found_origins), 1)
self.assertEqual(found_origins[0], origin_data)
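# regexp search: replacing the url's first and last characters with
# '.' wildcards should match the same origin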
found_origins = list(self.storage.origin_search(
'.' + self.origin['url'][1:-1] + '.', regexp=True))
self.assertEqual(len(found_origins), 1)
self.assertEqual(found_origins[0], origin_data)
id2 = self.storage.origin_add_one(self.origin2)
origin2_data = {'id': id2,
'type': self.origin2['type'],
'url': self.origin2['url'],
'lister': None,
'project': None}
found_origins = list(self.storage.origin_search(self.origin2['url']))
self.assertEqual(len(found_origins), 1)
self.assertEqual(found_origins[0], origin2_data)
found_origins = list(self.storage.origin_search(
'.' + self.origin2['url'][1:-1] + '.', regexp=True))
self.assertEqual(len(found_origins), 1)
self.assertEqual(found_origins[0], origin2_data)
found_origins = list(self.storage.origin_search('/'))
self.assertEqual(len(found_origins), 2)
found_origins = list(self.storage.origin_search('.*/.*', regexp=True))
self.assertEqual(len(found_origins), 2)
found_origins = list(self.storage.origin_search('/', offset=0, limit=1)) # noqa
self.assertEqual(len(found_origins), 1)
self.assertEqual(found_origins[0], origin_data)
found_origins = list(self.storage.origin_search('.*/.*', offset=0, limit=1, regexp=True)) # noqa
self.assertEqual(len(found_origins), 1)
self.assertEqual(found_origins[0], origin_data)
found_origins = list(self.storage.origin_search('/', offset=1, limit=1)) # noqa
self.assertEqual(len(found_origins), 1)
self.assertEqual(found_origins[0], origin2_data)
found_origins = list(self.storage.origin_search('.*/.*', offset=1, limit=1, regexp=True)) # noqa
self.assertEqual(len(found_origins), 1)
self.assertEqual(found_origins[0], origin2_data)
@istest
def origin_visit_add(self):
# given
self.assertIsNone(self.storage.origin_get(self.origin2))
origin_id = self.storage.origin_add_one(self.origin2)
self.assertIsNotNone(origin_id)
# when
origin_visit1 = self.storage.origin_visit_add(
origin_id,
ts=self.date_visit2)
# then
self.assertEquals(origin_visit1['origin'], origin_id)
self.assertIsNotNone(origin_visit1['visit'])
self.assertGreater(origin_visit1['visit'], 0)
actual_origin_visits = list(self.storage.origin_visit_get(origin_id))
self.assertEquals(actual_origin_visits,
[{
'origin': origin_id,
'date': self.date_visit2,
'visit': origin_visit1['visit'],
'status': 'ongoing',
'metadata': None,
'snapshot': None,
}])
@istest
def origin_visit_update(self):
# given
origin_id = self.storage.origin_add_one(self.origin2)
origin_id2 = self.storage.origin_add_one(self.origin)
origin_visit1 = self.storage.origin_visit_add(
origin_id,
ts=self.date_visit2)
origin_visit2 = self.storage.origin_visit_add(
origin_id,
ts=self.date_visit3)
origin_visit3 = self.storage.origin_visit_add(
origin_id2,
ts=self.date_visit3)
# when
visit1_metadata = {
'contents': 42,
'directories': 22,
}
self.storage.origin_visit_update(
origin_id, origin_visit1['visit'], status='full',
metadata=visit1_metadata)
self.storage.origin_visit_update(origin_id2, origin_visit3['visit'],
status='partial')
# then
actual_origin_visits = list(self.storage.origin_visit_get(origin_id))
self.assertEquals(actual_origin_visits, [{
'origin': origin_visit2['origin'],
'date': self.date_visit2,
'visit': origin_visit1['visit'],
'status': 'full',
'metadata': visit1_metadata,
'snapshot': None,
}, {
'origin': origin_visit2['origin'],
'date': self.date_visit3,
'visit': origin_visit2['visit'],
'status': 'ongoing',
'metadata': None,
'snapshot': None,
}])
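# origin_visit_get supports pagination: 'limit' caps the number of
# visits returned, while 'last_visit' resumes after the given visit id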
actual_origin_visits_bis = list(self.storage.origin_visit_get(
origin_id, limit=1))
self.assertEquals(actual_origin_visits_bis,
[{
'origin': origin_visit2['origin'],
'date': self.date_visit2,
'visit': origin_visit1['visit'],
'status': 'full',
'metadata': visit1_metadata,
'snapshot': None,
}])
actual_origin_visits_ter = list(self.storage.origin_visit_get(
origin_id, last_visit=origin_visit1['visit']))
self.assertEquals(actual_origin_visits_ter,
[{
'origin': origin_visit2['origin'],
'date': self.date_visit3,
'visit': origin_visit2['visit'],
'status': 'ongoing',
'metadata': None,
'snapshot': None,
}])
actual_origin_visits2 = list(self.storage.origin_visit_get(origin_id2))
self.assertEquals(actual_origin_visits2,
[{
'origin': origin_visit3['origin'],
'date': self.date_visit3,
'visit': origin_visit3['visit'],
'status': 'partial',
'metadata': None,
'snapshot': None,
}])
@istest
def origin_visit_get_by(self):
origin_id = self.storage.origin_add_one(self.origin2)
origin_id2 = self.storage.origin_add_one(self.origin)
origin_visit1 = self.storage.origin_visit_add(
origin_id,
ts=self.date_visit2)
occurrence2 = self.occurrence2.copy()
occurrence2.update({
'origin': origin_id,
'visit': origin_visit1['visit'],
})
self.storage.occurrence_add([occurrence2])
# Add some other {origin, visit} entries
self.storage.origin_visit_add(origin_id, ts=self.date_visit3)
self.storage.origin_visit_add(origin_id2, ts=self.date_visit3)
# when
visit1_metadata = {
'contents': 42,
'directories': 22,
}
self.storage.origin_visit_update(
origin_id, origin_visit1['visit'], status='full',
metadata=visit1_metadata)
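# the visit returned by origin_visit_get_by carries the legacy
# 'occurrences' mapping alongside the newer 'snapshot' field
# (still None here)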
expected_origin_visit = origin_visit1.copy()
expected_origin_visit.update({
'origin': origin_id,
'visit': origin_visit1['visit'],
'date': self.date_visit2,
'metadata': visit1_metadata,
'status': 'full',
'occurrences': {
occurrence2['branch']: {
'target': occurrence2['target'],
'target_type': occurrence2['target_type'],
}
},
'snapshot': None,
})
# when
actual_origin_visit1 = self.storage.origin_visit_get_by(
origin_visit1['origin'], origin_visit1['visit'])
# then
self.assertEquals(actual_origin_visit1, expected_origin_visit)
@istest
def origin_visit_get_by_no_result(self):
# No result
actual_origin_visit = self.storage.origin_visit_get_by(
10, 999)
self.assertIsNone(actual_origin_visit)
@istest
def occurrence_add(self):
occur = self.occurrence.copy()
origin_id = self.storage.origin_add_one(self.origin2)
date_visit1 = self.date_visit1
origin_visit1 = self.storage.origin_visit_add(origin_id, date_visit1)
revision = self.revision.copy()
revision['id'] = occur['target']
self.storage.revision_add([revision])
occur.update({
'origin': origin_id,
'visit': origin_visit1['visit'],
})
self.storage.occurrence_add([occur])
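# inspect the underlying occurrence_history table directly: each row
# stores an array of visits, which the query unnests into one row per
# (origin, branch, target, visit), joined with origin_visit for the date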
test_query = '''
with indiv_occurrences as (
select origin, branch, target, target_type, unnest(visits) as visit
from occurrence_history
)
select origin, branch, target, target_type, date
from indiv_occurrences
left join origin_visit using(origin, visit)
order by origin, date'''
self.cursor.execute(test_query)
ret = self.cursor.fetchall()
self.assertEqual(len(ret), 1)
self.assertEqual(
(ret[0][0], ret[0][1].tobytes(), ret[0][2].tobytes(),
ret[0][3], ret[0][4]),
(occur['origin'], occur['branch'], occur['target'],
occur['target_type'], self.date_visit1))
date_visit2 = date_visit1 + datetime.timedelta(hours=10)
origin_visit2 = self.storage.origin_visit_add(origin_id, date_visit2)
occur2 = occur.copy()
occur2.update({
'visit': origin_visit2['visit'],
})
self.storage.occurrence_add([occur2])
self.cursor.execute(test_query)
ret = self.cursor.fetchall()
self.assertEqual(len(ret), 2)
self.assertEqual(
(ret[0][0], ret[0][1].tobytes(), ret[0][2].tobytes(),
ret[0][3], ret[0][4]),
(occur['origin'], occur['branch'], occur['target'],
occur['target_type'], date_visit1))
self.assertEqual(
(ret[1][0], ret[1][1].tobytes(), ret[1][2].tobytes(),
ret[1][3], ret[1][4]),
(occur2['origin'], occur2['branch'], occur2['target'],
occur2['target_type'], date_visit2))
@istest
def occurrence_get(self):
# given
occur = self.occurrence.copy()
origin_id = self.storage.origin_add_one(self.origin2)
origin_visit1 = self.storage.origin_visit_add(origin_id,
self.date_visit1)
revision = self.revision.copy()
revision['id'] = occur['target']
self.storage.revision_add([revision])
occur.update({
'origin': origin_id,
'visit': origin_visit1['visit'],
})
self.storage.occurrence_add([occur])
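# add the same occurrence a second time: the call must be idempotent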
self.storage.occurrence_add([occur])
# when
actual_occurrence = list(self.storage.occurrence_get(origin_id))
# then
expected_occurrence = self.occurrence.copy()
expected_occurrence.update({
'origin': origin_id
})
self.assertEquals(len(actual_occurrence), 1)
self.assertEquals(actual_occurrence[0], expected_occurrence)
@istest
def snapshot_add_get_empty(self):
origin_id = self.storage.origin_add_one(self.origin)
origin_visit1 = self.storage.origin_visit_add(origin_id,
self.date_visit1)
visit_id = origin_visit1['visit']
self.storage.snapshot_add(origin_id, visit_id, self.empty_snapshot)
by_id = self.storage.snapshot_get(self.empty_snapshot['id'])
self.assertEqual(by_id, self.empty_snapshot)
by_ov = self.storage.snapshot_get_by_origin_visit(origin_id, visit_id)
self.assertEqual(by_ov, self.empty_snapshot)
@istest
def snapshot_add_get_complete(self):
origin_id = self.storage.origin_add_one(self.origin)
origin_visit1 = self.storage.origin_visit_add(origin_id,
self.date_visit1)
visit_id = origin_visit1['visit']
self.storage.snapshot_add(origin_id, visit_id, self.complete_snapshot)
by_id = self.storage.snapshot_get(self.complete_snapshot['id'])
self.assertEqual(by_id, self.complete_snapshot)
by_ov = self.storage.snapshot_get_by_origin_visit(origin_id, visit_id)
self.assertEqual(by_ov, self.complete_snapshot)
@istest
def snapshot_add_get(self):
origin_id = self.storage.origin_add_one(self.origin)
origin_visit1 = self.storage.origin_visit_add(origin_id,
self.date_visit1)
visit_id = origin_visit1['visit']
self.storage.snapshot_add(origin_id, visit_id, self.snapshot)
by_id = self.storage.snapshot_get(self.snapshot['id'])
self.assertEqual(by_id, self.snapshot)
by_ov = self.storage.snapshot_get_by_origin_visit(origin_id, visit_id)
self.assertEqual(by_ov, self.snapshot)
# retrocompat test
origin_visit_info = self.storage.origin_visit_get_by(origin_id,
visit_id)
self.assertEqual(origin_visit_info['occurrences'],
self.snapshot['branches'])
@istest
def snapshot_add_twice(self):
origin_id = self.storage.origin_add_one(self.origin)
origin_visit1 = self.storage.origin_visit_add(origin_id,
self.date_visit1)
visit1_id = origin_visit1['visit']
self.storage.snapshot_add(origin_id, visit1_id, self.snapshot)
by_ov1 = self.storage.snapshot_get_by_origin_visit(origin_id,
visit1_id)
self.assertEqual(by_ov1, self.snapshot)
origin_visit2 = self.storage.origin_visit_add(origin_id,
self.date_visit2)
visit2_id = origin_visit2['visit']
self.storage.snapshot_add(origin_id, visit2_id, self.snapshot)
by_ov2 = self.storage.snapshot_get_by_origin_visit(origin_id,
visit2_id)
self.assertEqual(by_ov2, self.snapshot)
@istest
def snapshot_get_nonexistent(self):
bogus_snapshot_id = b'bogus snapshot id 00'
bogus_origin_id = 1
bogus_visit_id = 1
by_id = self.storage.snapshot_get(bogus_snapshot_id)
self.assertIsNone(by_id)
by_ov = self.storage.snapshot_get_by_origin_visit(bogus_origin_id,
bogus_visit_id)
self.assertIsNone(by_ov)
@istest
def snapshot_get_retrocompat(self):
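# a visit with no snapshot is exposed, for backward compatibility, as
# a snapshot with id None whose branches come from its occurrences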
empty_retro_snapshot = {
'id': None,
'branches': {},
}
origin_id = self.storage.origin_add_one(self.origin)
origin_visit1 = self.storage.origin_visit_add(origin_id,
self.date_visit1)
visit_id = origin_visit1['visit']
by_ov = self.storage.snapshot_get_by_origin_visit(origin_id, visit_id)
self.assertEqual(by_ov, empty_retro_snapshot)
self.storage.revision_add([self.revision])
self.storage.occurrence_add([{
'origin': origin_id,
'visit': visit_id,
'branch': self.occurrence['branch'],
'target': self.occurrence['target'],
'target_type': self.occurrence['target_type'],
}])
one_branch_retro_snapshot = {
'id': None,
'branches': {
self.occurrence['branch']: {
'target': self.occurrence['target'],
'target_type': self.occurrence['target_type'],
},
},
}
by_ov = self.storage.snapshot_get_by_origin_visit(origin_id, visit_id)
self.assertEqual(by_ov, one_branch_retro_snapshot)
@istest
def snapshot_add_back_compat(self):
origin_id = self.storage.origin_add_one(self.origin)
origin_visit1 = self.storage.origin_visit_add(origin_id,
self.date_visit1)
visit_id = origin_visit1['visit']
self.storage.snapshot_add(origin_id, visit_id, self.complete_snapshot,
back_compat=True)
ov = self.storage.origin_visit_get_by(origin_id, visit_id)
self.assertEquals(ov['occurrences'],
self.complete_snapshot['branches'])
self.assertEquals(ov['snapshot'],
self.complete_snapshot['id'])
origin_visit2 = self.storage.origin_visit_add(origin_id,
self.date_visit2)
visit_id = origin_visit2['visit']
self.storage.snapshot_add(origin_id, visit_id, self.complete_snapshot,
back_compat=False)
ov = self.storage.origin_visit_get_by(origin_id, visit_id)
self.assertEquals(ov['occurrences'],
self.complete_snapshot['branches'])
self.assertEquals(ov['snapshot'],
self.complete_snapshot['id'])
+ @istest
+ def snapshot_get_latest(self):
+ origin_id = self.storage.origin_add_one(self.origin)
+ origin_visit1 = self.storage.origin_visit_add(origin_id,
+ self.date_visit1)
+ visit1_id = origin_visit1['visit']
+ origin_visit2 = self.storage.origin_visit_add(origin_id,
+ self.date_visit2)
+ visit2_id = origin_visit2['visit']
+
+ # Two visits, both with no snapshot: latest snapshot is None
+ self.assertIsNone(self.storage.snapshot_get_latest(origin_id))
+
+ # Add snapshot to visit1, latest snapshot = visit 1 snapshot
+ self.storage.snapshot_add(origin_id, visit1_id, self.complete_snapshot)
+ self.assertEquals(self.complete_snapshot,
+ self.storage.snapshot_get_latest(origin_id))
+
+ # Status filter: both visits are status=ongoing, so no snapshot
+ # returned
+ self.assertIsNone(
+ self.storage.snapshot_get_latest(origin_id,
+ allowed_statuses=['full'])
+ )
+
+ # Mark the first visit as completed and check status filter again
+ self.storage.origin_visit_update(origin_id, visit1_id, status='full')
+ self.assertEquals(
+ self.complete_snapshot,
+ self.storage.snapshot_get_latest(origin_id,
+ allowed_statuses=['full']),
+ )
+
+ # Add snapshot to visit2 and check that the new snapshot is returned
+ self.storage.snapshot_add(origin_id, visit2_id, self.empty_snapshot)
+ self.assertEquals(self.empty_snapshot,
+ self.storage.snapshot_get_latest(origin_id))
+
+ # Check that the status filter is still working
+ self.assertEquals(
+ self.complete_snapshot,
+ self.storage.snapshot_get_latest(origin_id,
+ allowed_statuses=['full']),
+ )
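+
+        # Note: visit2 is still 'ongoing', so the 'full' filter keeps
+        # selecting visit1's snapshot even though visit2 is more recent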
+
@istest
def entity_get_from_lister_metadata(self):
self.storage.entity_add([self.entity1])
fetched_entities = list(
self.storage.entity_get_from_lister_metadata(
[self.entity1_query, self.entity2_query]))
# Entity 1 should have full metadata, with last_seen/last_id instead
# of validity
entity1 = self.entity1.copy()
entity1['last_seen'] = entity1['validity'][0]
del fetched_entities[0]['last_id']
del entity1['validity']
# Entity 2 should have no metadata
entity2 = {
'uuid': None,
'lister_metadata': self.entity2_query.copy(),
}
self.assertEquals(fetched_entities, [entity1, entity2])
@istest
def entity_get_from_lister_metadata_twice(self):
self.storage.entity_add([self.entity1])
fetched_entities1 = list(
self.storage.entity_get_from_lister_metadata(
[self.entity1_query]))
fetched_entities2 = list(
self.storage.entity_get_from_lister_metadata(
[self.entity1_query]))
self.assertEquals(fetched_entities1, fetched_entities2)
@istest
def entity_get(self):
# given
self.storage.entity_add([self.entity4])
self.storage.entity_add([self.entity3])
# when: entity3 -child-of-> entity4
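# entity_get returns the entity followed by its parents, hence two
# results for entity3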
actual_entity3 = list(self.storage.entity_get(self.entity3['uuid']))
self.assertEquals(len(actual_entity3), 2)
# remove dynamic data (modified by db)
entity3 = self.entity3.copy()
entity4 = self.entity4.copy()
del entity3['validity']
del entity4['validity']
del actual_entity3[0]['last_seen']
del actual_entity3[0]['last_id']
del actual_entity3[1]['last_seen']
del actual_entity3[1]['last_id']
self.assertEquals(actual_entity3, [entity3, entity4])
# when: entity4 only child
actual_entity4 = list(self.storage.entity_get(self.entity4['uuid']))
self.assertEquals(len(actual_entity4), 1)
# remove dynamic data (modified by db)
entity4 = self.entity4.copy()
del entity4['validity']
del actual_entity4[0]['last_id']
del actual_entity4[0]['last_seen']
self.assertEquals(actual_entity4, [entity4])
@istest
def entity_get_one(self):
# given
self.storage.entity_add([self.entity3, self.entity4])
# when: entity3 -child-of-> entity4
actual_entity3 = self.storage.entity_get_one(self.entity3['uuid'])
# remove dynamic data (modified by db)
entity3 = self.entity3.copy()
del entity3['validity']
del actual_entity3['last_seen']
del actual_entity3['last_id']
self.assertEquals(actual_entity3, entity3)
@istest
def stat_counters(self):
expected_keys = ['content', 'directory', 'directory_entry_dir',
'occurrence', 'origin', 'person', 'revision']
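# refresh each counter via the swh_update_counter() SQL function
# before reading them back through the API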
for key in expected_keys:
self.cursor.execute('select * from swh_update_counter(%s)', (key,))
self.conn.commit()
counters = self.storage.stat_counters()
self.assertTrue(set(expected_keys) <= set(counters))
self.assertIsInstance(counters[expected_keys[0]], int)
@istest
def content_find_with_present_content(self):
# 1. lookup by sha1
cont = self.cont
self.storage.content_add([cont])
actually_present = self.storage.content_find({'sha1': cont['sha1']})
actually_present.pop('ctime')
self.assertEqual(actually_present, {
'sha1': cont['sha1'],
'sha256': cont['sha256'],
'sha1_git': cont['sha1_git'],
'blake2s256': cont['blake2s256'],
'length': cont['length'],
'status': 'visible'
})
# 2. lookup by sha1_git
actually_present = self.storage.content_find(
{'sha1_git': cont['sha1_git']})
actually_present.pop('ctime')
self.assertEqual(actually_present, {
'sha1': cont['sha1'],
'sha256': cont['sha256'],
'sha1_git': cont['sha1_git'],
'blake2s256': cont['blake2s256'],
'length': cont['length'],
'status': 'visible'
})
# 3. lookup by sha256
actually_present = self.storage.content_find(
{'sha256': cont['sha256']})
actually_present.pop('ctime')
self.assertEqual(actually_present, {
'sha1': cont['sha1'],
'sha256': cont['sha256'],
'sha1_git': cont['sha1_git'],
'blake2s256': cont['blake2s256'],
'length': cont['length'],
'status': 'visible'
})
# 4. lookup by all four hashes at once
actually_present = self.storage.content_find({
'sha1': cont['sha1'],
'sha1_git': cont['sha1_git'],
'sha256': cont['sha256'],
'blake2s256': cont['blake2s256'],
})
actually_present.pop('ctime')
self.assertEqual(actually_present, {
'sha1': cont['sha1'],
'sha256': cont['sha256'],
'sha1_git': cont['sha1_git'],
'blake2s256': cont['blake2s256'],
'length': cont['length'],
'status': 'visible'
})
@istest
def content_find_with_non_present_content(self):
# 1. lookup by sha1 of a missing content
missing_cont = self.missing_cont
actually_present = self.storage.content_find(
{'sha1': missing_cont['sha1']})
self.assertIsNone(actually_present)
# 2. lookup by sha1_git of a missing content
actually_present = self.storage.content_find(
{'sha1_git': missing_cont['sha1_git']})
self.assertIsNone(actually_present)
# 3. lookup by sha256 of a missing content
actually_present = self.storage.content_find(
{'sha256': missing_cont['sha256']})
self.assertIsNone(actually_present)
@istest
def content_find_bad_input(self):
# 1. with bad input
with self.assertRaises(ValueError):
self.storage.content_find({}) # empty is bad
# 2. with bad input
with self.assertRaises(ValueError):
self.storage.content_find(
{'unknown-sha1': 'something'}) # not the right key
@istest
def object_find_by_sha1_git(self):
sha1_gits = [b'00000000000000000000']
expected = {
b'00000000000000000000': [],
}
self.storage.content_add([self.cont])
sha1_gits.append(self.cont['sha1_git'])
expected[self.cont['sha1_git']] = [{
'sha1_git': self.cont['sha1_git'],
'type': 'content',
'id': self.cont['sha1'],
}]
self.storage.directory_add([self.dir])
sha1_gits.append(self.dir['id'])
expected[self.dir['id']] = [{
'sha1_git': self.dir['id'],
'type': 'directory',
'id': self.dir['id'],
}]
self.storage.revision_add([self.revision])
sha1_gits.append(self.revision['id'])
expected[self.revision['id']] = [{
'sha1_git': self.revision['id'],
'type': 'revision',
'id': self.revision['id'],
}]
self.storage.release_add([self.release])
sha1_gits.append(self.release['id'])
expected[self.release['id']] = [{
'sha1_git': self.release['id'],
'type': 'release',
'id': self.release['id'],
}]
ret = self.storage.object_find_by_sha1_git(sha1_gits)
for val in ret.values():
for obj in val:
del obj['object_id']
self.assertEqual(expected, ret)
@istest
def tool_add(self):
tool = {
'name': 'some-unknown-tool',
'version': 'some-version',
'configuration': {"debian-package": "some-package"},
}
actual_tool = self.storage.tool_get(tool)
self.assertIsNone(actual_tool) # does not exist
# add it
actual_tools = list(self.storage.tool_add([tool]))
self.assertEquals(len(actual_tools), 1)
actual_tool = actual_tools[0]
self.assertIsNotNone(actual_tool) # now it exists
new_id = actual_tool.pop('id')
self.assertEquals(actual_tool, tool)
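# adding the same tool again must be idempotent: the same id and
# the same content come back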
actual_tools2 = list(self.storage.tool_add([tool]))
actual_tool2 = actual_tools2[0]
self.assertIsNotNone(actual_tool2) # now it exists
new_id2 = actual_tool2.pop('id')
self.assertEqual(new_id, new_id2)
self.assertEqual(actual_tool, actual_tool2)
@istest
def tool_add_multiple(self):
tool = {
'name': 'some-unknown-tool',
'version': 'some-version',
'configuration': {"debian-package": "some-package"},
}
actual_tools = list(self.storage.tool_add([tool]))
self.assertEqual(len(actual_tools), 1)
new_tools = [tool, {
'name': 'yet-another-tool',
'version': 'version',
'configuration': {},
}]
actual_tools = list(self.storage.tool_add(new_tools))
self.assertEqual(len(actual_tools), 2)
# order not guaranteed, so we iterate over results to check
for tool in actual_tools:
_id = tool.pop('id')
self.assertIsNotNone(_id)
self.assertIn(tool, new_tools)
@istest
def tool_get_missing(self):
tool = {
'name': 'unknown-tool',
'version': '3.1.0rc2-31-ga2cbb8c',
'configuration': {"command_line": "nomossa <filepath>"},
}
actual_tool = self.storage.tool_get(tool)
self.assertIsNone(actual_tool)
@istest
def tool_metadata_get_missing_context(self):
tool = {
'name': 'swh-metadata-translator',
'version': '0.0.1',
'configuration': {"context": "unknown-context"},
}
actual_tool = self.storage.tool_get(tool)
self.assertIsNone(actual_tool)
@istest
def tool_metadata_get(self):
tool = {
'name': 'swh-metadata-translator',
'version': '0.0.1',
'configuration': {"type": "local", "context": "npm"},
}
tools = list(self.storage.tool_add([tool]))
expected_tool = tools[0]
# when
actual_tool = self.storage.tool_get(tool)
# then
self.assertEqual(expected_tool, actual_tool)
@istest
def metadata_provider_get_by(self):
# given
no_provider = self.storage.metadata_provider_get_by({
'provider_name': self.provider['name'],
'provider_url': self.provider['url']
})
self.assertIsNone(no_provider)
# when
provider_id = self.storage.metadata_provider_add(
self.provider['name'],
self.provider['type'],
self.provider['url'],
self.provider['metadata'])
actual_provider = self.storage.metadata_provider_get_by({
'provider_name': self.provider['name'],
'provider_url': self.provider['url']
})
# then
self.assertEqual(provider_id, actual_provider['id'])
@istest
def origin_metadata_add(self):
# given
origin_id = self.storage.origin_add([self.origin])[0]
origin_metadata0 = list(self.storage.origin_metadata_get_by(origin_id))
self.assertEqual(len(origin_metadata0), 0)
tools = list(self.storage.tool_add([self.metadata_tool]))
tool = tools[0]
self.storage.metadata_provider_add(
self.provider['name'],
self.provider['type'],
self.provider['url'],
self.provider['metadata'])
provider = self.storage.metadata_provider_get_by({
'provider_name': self.provider['name'],
'provider_url': self.provider['url']
})
tool = self.storage.tool_get(self.metadata_tool)
# when adding a metadata entry for this origin
o_m1 = self.storage.origin_metadata_add(
origin_id,
self.origin_metadata['discovery_date'],
provider['id'],
tool['id'],
self.origin_metadata['metadata'])
actual_om1 = list(self.storage.origin_metadata_get_by(origin_id))
# then
self.assertEqual(actual_om1[0]['id'], o_m1)
self.assertEqual(len(actual_om1), 1)
self.assertEqual(actual_om1[0]['origin_id'], origin_id)
@istest
def origin_metadata_get(self):
# given
origin_id = self.storage.origin_add([self.origin])[0]
origin_id2 = self.storage.origin_add([self.origin2])[0]
self.storage.metadata_provider_add(self.provider['name'],
self.provider['type'],
self.provider['url'],
self.provider['metadata'])
provider = self.storage.metadata_provider_get_by({
'provider_name': self.provider['name'],
'provider_url': self.provider['url']
})
tool = self.storage.tool_get(self.metadata_tool)
# when adding two metadata entries for origin_id and one for origin_id2
o_m1 = self.storage.origin_metadata_add(
origin_id,
self.origin_metadata['discovery_date'],
provider['id'],
tool['id'],
self.origin_metadata['metadata'])
o_m2 = self.storage.origin_metadata_add(
origin_id2,
self.origin_metadata2['discovery_date'],
provider['id'],
tool['id'],
self.origin_metadata2['metadata'])
o_m3 = self.storage.origin_metadata_add(
origin_id,
self.origin_metadata2['discovery_date'],
provider['id'],
tool['id'],
self.origin_metadata2['metadata'])
all_metadatas = list(self.storage.origin_metadata_get_by(origin_id))
metadatas_for_origin2 = list(self.storage.origin_metadata_get_by(
origin_id2))
expected_results = [{
'origin_id': origin_id,
'discovery_date': datetime.datetime(
2017, 1, 2, 0, 0,
tzinfo=psycopg2.tz.FixedOffsetTimezone(
offset=60,
name=None)),
'metadata': {
'name': 'test_origin_metadata',
'version': '0.0.1'
},
'id': o_m3,
'provider_id': provider['id'],
'provider_name': 'hal',
'provider_type': 'deposit-client',
'provider_url': 'http:///hal/inria',
'tool_id': tool['id']
}, {
'origin_id': origin_id,
'discovery_date': datetime.datetime(
2015, 1, 2, 0, 0,
tzinfo=psycopg2.tz.FixedOffsetTimezone(
offset=60,
name=None)),
'metadata': {
'name': 'test_origin_metadata',
'version': '0.0.1'
},
'id': o_m1,
'provider_id': provider['id'],
'provider_name': 'hal',
'provider_type': 'deposit-client',
'provider_url': 'http:///hal/inria',
'tool_id': tool['id']
}]
# then
self.assertEqual(len(all_metadatas), 2)
self.assertEqual(len(metadatas_for_origin2), 1)
self.assertEqual(metadatas_for_origin2[0]['id'], o_m2)
self.assertEqual(all_metadatas, expected_results)
@istest
def origin_metadata_get_by_provider_type(self):
# given
origin_id = self.storage.origin_add([self.origin])[0]
origin_id2 = self.storage.origin_add([self.origin2])[0]
self.storage.metadata_provider_add(
self.provider['name'],
self.provider['type'],
self.provider['url'],
self.provider['metadata'])
provider1 = self.storage.metadata_provider_get_by({
'provider_name': self.provider['name'],
'provider_url': self.provider['url']
})
self.storage.metadata_provider_add(
'swMATH',
'registry',
'http://www.swmath.org/',
{'email': 'contact@swmath.org',
'license': 'All rights reserved'})
provider2 = self.storage.metadata_provider_get_by({
'provider_name': 'swMATH',
'provider_url': 'http://www.swmath.org/'
})
# use the only tool currently inserted through data.sql; for this
# provider it should eventually be a crawler tool (not yet implemented)
tool = self.storage.tool_get(self.metadata_tool)
# when adding one metadata entry per origin, each through a
# different provider
o_m1 = self.storage.origin_metadata_add(
origin_id,
self.origin_metadata['discovery_date'],
provider1['id'],
tool['id'],
self.origin_metadata['metadata'])
o_m2 = self.storage.origin_metadata_add(
origin_id2,
self.origin_metadata2['discovery_date'],
provider2['id'],
tool['id'],
self.origin_metadata2['metadata'])
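# filter by provider type: only the swMATH ('registry') entry,
# attached to origin_id2, should match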
provider_type = 'registry'
m_by_provider = list(self.storage.origin_metadata_get_by(
origin_id2, provider_type))
expected_results = [{
'origin_id': origin_id2,
'discovery_date': datetime.datetime(
2017, 1, 2, 0, 0,
tzinfo=psycopg2.tz.FixedOffsetTimezone(
offset=60,
name=None)),
'metadata': {
'name': 'test_origin_metadata',
'version': '0.0.1'
},
'id': o_m2,
'provider_id': provider2['id'],
'provider_name': 'swMATH',
'provider_type': provider_type,
'provider_url': 'http://www.swmath.org/',
'tool_id': tool['id']
}]
# then
self.assertEqual(len(m_by_provider), 1)
self.assertEqual(m_by_provider, expected_results)
self.assertEqual(m_by_provider[0]['id'], o_m2)
self.assertIsNotNone(o_m1)
class TestLocalStorage(CommonTestStorage, unittest.TestCase):
"""Test the local storage"""
# Can only be tested with local storage as you can't mock
# datetimes for the remote server
@istest
def fetch_history(self):
origin = self.storage.origin_add_one(self.origin)
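# freeze datetime.datetime.now() so the start/end timestamps, and
# thus the computed duration, are deterministic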
with patch('datetime.datetime'):
datetime.datetime.now.return_value = self.fetch_history_date
fetch_history_id = self.storage.fetch_history_start(origin)
datetime.datetime.now.assert_called_with(tz=datetime.timezone.utc)
with patch('datetime.datetime'):
datetime.datetime.now.return_value = self.fetch_history_end
self.storage.fetch_history_end(fetch_history_id,
self.fetch_history_data)
fetch_history = self.storage.fetch_history_get(fetch_history_id)
expected_fetch_history = self.fetch_history_data.copy()
expected_fetch_history['id'] = fetch_history_id
expected_fetch_history['origin'] = origin
expected_fetch_history['date'] = self.fetch_history_date
expected_fetch_history['duration'] = self.fetch_history_duration
self.assertEqual(expected_fetch_history, fetch_history)
# The remote API doesn't expose _person_add
@istest
def person_get(self):
# given
person0 = {
'fullname': b'bob <alice@bob>',
'name': b'bob',
'email': b'alice@bob',
}
id0 = self.storage._person_add(person0)
person1 = {
'fullname': b'tony <tony@bob>',
'name': b'tony',
'email': b'tony@bob',
}
id1 = self.storage._person_add(person1)
# when
actual_persons = self.storage.person_get([id0, id1])
# then
self.assertEqual(
list(actual_persons), [
{
'id': id0,
'fullname': person0['fullname'],
'name': person0['name'],
'email': person0['email'],
},
{
'id': id1,
'fullname': person1['fullname'],
'name': person1['name'],
'email': person1['email'],
},
])
class AlteringSchemaTest(BaseTestStorage, unittest.TestCase):
"""This class is dedicated for the rare case where the schema needs to
be altered dynamically.
Otherwise, the tests could be blocking when ran altogether.
"""
@istest
def content_update(self):
cont = copy.deepcopy(self.cont)
self.storage.content_add([cont])
# alter the sha1_git for example
cont['sha1_git'] = hash_to_bytes(
'3a60a5275d0333bf13468e8b3dcab90f4046e654')
self.storage.content_update([cont], keys=['sha1_git'])
self.cursor.execute('SELECT sha1, sha1_git, sha256, length, status'
' FROM content WHERE sha1 = %s',
(cont['sha1'],))
datum = self.cursor.fetchone()
self.assertEqual(
(datum[0].tobytes(), datum[1].tobytes(), datum[2].tobytes(),
datum[3], datum[4]),
(cont['sha1'], cont['sha1_git'], cont['sha256'],
cont['length'], 'visible'))
@istest
def content_update_with_new_cols(self):
self.cursor.execute("""alter table content
add column test text default null,
add column test2 text default null""")
cont = copy.deepcopy(self.cont2)
self.storage.content_add([cont])
cont['test'] = 'value-1'
cont['test2'] = 'value-2'
self.storage.content_update([cont], keys=['test', 'test2'])
self.cursor.execute(
'SELECT sha1, sha1_git, sha256, length, status, test, test2'
' FROM content WHERE sha1 = %s',
(cont['sha1'],))
datum = self.cursor.fetchone()
self.assertEqual(
(datum[0].tobytes(), datum[1].tobytes(), datum[2].tobytes(),
datum[3], datum[4], datum[5], datum[6]),
(cont['sha1'], cont['sha1_git'], cont['sha256'],
cont['length'], 'visible', cont['test'], cont['test2']))
self.cursor.execute("""alter table content drop column test,
drop column test2""")