diff --git a/swh/storage/api/client.py b/swh/storage/api/client.py
index 567c165..2476711 100644
--- a/swh/storage/api/client.py
+++ b/swh/storage/api/client.py
@@ -1,279 +1,280 @@
# Copyright (C) 2015 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information
import pickle
import requests
from requests.exceptions import ConnectionError
from swh.objstorage.api.common import (decode_response,
encode_data_client as encode_data)
from ..exc import StorageAPIError
class RemoteStorage():
"""Proxy to a remote storage API"""
def __init__(self, url):
self.url = url
self.session = requests.Session()
def _url(self, endpoint):
return '%s%s' % (self.url, endpoint)
def post(self, endpoint, data):
try:
response = self.session.post(
self._url(endpoint),
data=encode_data(data),
headers={'content-type': 'application/x-msgpack'},
)
except ConnectionError as e:
print(str(e))
raise StorageAPIError(e)
# XXX: this breaks language-independence and should be
# replaced by proper unserialization
if response.status_code == 400:
raise pickle.loads(decode_response(response))
return decode_response(response)
def get(self, endpoint, data=None):
try:
response = self.session.get(
self._url(endpoint),
params=data,
)
except ConnectionError as e:
print(str(e))
raise StorageAPIError(e)
if response.status_code == 404:
return None
# XXX: this breaks language-independence and should be
# replaced by proper unserialization
if response.status_code == 400:
raise pickle.loads(decode_response(response))
else:
return decode_response(response)
def check_config(self, *, check_write):
return self.post('check_config', {'check_write': check_write})
def content_add(self, content):
return self.post('content/add', {'content': content})
def content_missing(self, content, key_hash='sha1'):
return self.post('content/missing', {'content': content,
'key_hash': key_hash})
def content_missing_per_sha1(self, contents):
return self.post('content/missing/sha1', {'contents': contents})
def content_get(self, content):
return self.post('content/data', {'content': content})
def content_get_metadata(self, content):
return self.post('content/metadata', {'content': content})
def content_find(self, content):
return self.post('content/present', {'content': content})
def content_find_provenance(self, content):
return self.post('content/provenance', {'content': content})
def directory_add(self, directories):
return self.post('directory/add', {'directories': directories})
def directory_missing(self, directories):
return self.post('directory/missing', {'directories': directories})
def directory_get(self, directories):
return self.post('directory', dict(directories=directories))
def directory_ls(self, directory, recursive=False):
return self.get('directory/ls', {'directory': directory,
'recursive': recursive})
def revision_get(self, revisions):
return self.post('revision', {'revisions': revisions})
def revision_get_by(self, origin_id, branch_name, timestamp, limit=None):
return self.post('revision/by', dict(origin_id=origin_id,
branch_name=branch_name,
timestamp=timestamp,
limit=limit))
def revision_log(self, revisions, limit=None):
return self.post('revision/log', {'revisions': revisions,
'limit': limit})
def revision_log_by(self, origin_id, branch_name, timestamp, limit=None):
return self.post('revision/logby', {'origin_id': origin_id,
'branch_name': branch_name,
'timestamp': timestamp,
'limit': limit})
def revision_shortlog(self, revisions, limit=None):
return self.post('revision/shortlog', {'revisions': revisions,
'limit': limit})
def cache_content_revision_add(self, revisions):
return self.post('cache/content_revision', {'revisions': revisions})
def cache_content_get_all(self):
return self.get('cache/contents')
def cache_content_get(self, content):
return self.post('cache/content', {'content': content})
def cache_revision_origin_add(self, origin, visit):
return self.post('cache/revision_origin', {'origin': origin,
'visit': visit})
def revision_add(self, revisions):
return self.post('revision/add', {'revisions': revisions})
def revision_missing(self, revisions):
return self.post('revision/missing', {'revisions': revisions})
def release_add(self, releases):
return self.post('release/add', {'releases': releases})
def release_get(self, releases):
return self.post('release', {'releases': releases})
def release_get_by(self, origin_id, limit=None):
return self.post('release/by', dict(origin_id=origin_id,
limit=limit))
def release_missing(self, releases):
return self.post('release/missing', {'releases': releases})
def object_find_by_sha1_git(self, ids):
return self.post('object/find_by_sha1_git', {'ids': ids})
def occurrence_get(self, origin_id):
return self.post('occurrence', {'origin_id': origin_id})
def occurrence_add(self, occurrences):
return self.post('occurrence/add', {'occurrences': occurrences})
def origin_get(self, origin):
return self.post('origin/get', {'origin': origin})
def origin_add(self, origins):
return self.post('origin/add_multi', {'origins': origins})
def origin_add_one(self, origin):
return self.post('origin/add', {'origin': origin})
def origin_visit_add(self, origin, ts):
return self.post('origin/visit/add', {'origin': origin, 'ts': ts})
def origin_visit_update(self, origin, visit_id, status, metadata=None):
return self.post('origin/visit/update', {'origin': origin,
'visit_id': visit_id,
'status': status,
'metadata': metadata})
- def origin_visit_get(self, origin):
- return self.post('origin/visit/get', {'origin': origin})
+ def origin_visit_get(self, origin, last_visit=None, limit=None):
+ return self.post('origin/visit/get', {
+ 'origin': origin, 'last_visit': last_visit, 'limit': limit})
def origin_visit_get_by(self, origin, visit):
return self.post('origin/visit/getby', {'origin': origin,
'visit': visit})
def person_get(self, person):
return self.post('person', {'person': person})
def fetch_history_start(self, origin_id):
return self.post('fetch_history/start', {'origin_id': origin_id})
def fetch_history_end(self, fetch_history_id, data):
return self.post('fetch_history/end',
{'fetch_history_id': fetch_history_id,
'data': data})
def fetch_history_get(self, fetch_history_id):
return self.get('fetch_history', {'id': fetch_history_id})
def entity_add(self, entities):
return self.post('entity/add', {'entities': entities})
def entity_get(self, uuid):
return self.post('entity/get', {'uuid': uuid})
def entity_get_one(self, uuid):
return self.get('entity', {'uuid': uuid})
def entity_get_from_lister_metadata(self, entities):
return self.post('entity/from_lister_metadata', {'entities': entities})
def stat_counters(self):
return self.get('stat/counters')
def directory_entry_get_by_path(self, directory, paths):
return self.post('directory/path', dict(directory=directory,
paths=paths))
def content_mimetype_add(self, mimetypes, conflict_update=False):
return self.post('content_mimetype/add', {
'mimetypes': mimetypes,
'conflict_update': conflict_update,
})
def content_mimetype_missing(self, mimetypes):
return self.post('content_mimetype/missing', {'mimetypes': mimetypes})
def content_mimetype_get(self, ids):
return self.post('content_mimetype', {'ids': ids})
def content_language_add(self, languages, conflict_update=False):
return self.post('content_language/add', {
'languages': languages,
'conflict_update': conflict_update,
})
def content_language_missing(self, languages):
return self.post('content_language/missing', {'languages': languages})
def content_language_get(self, ids):
return self.post('content_language', {'ids': ids})
def content_ctags_add(self, ctags, conflict_update=False):
return self.post('content/ctags/add', {
'ctags': ctags,
'conflict_update': conflict_update,
})
def content_ctags_missing(self, ctags):
return self.post('content/ctags/missing', {'ctags': ctags})
def content_ctags_get(self, ids):
return self.post('content/ctags', {'ids': ids})
def content_ctags_search(self, expression, limit=10, last_sha1=None):
return self.post('content/ctags/search', {
'expression': expression,
'limit': limit,
'last_sha1': last_sha1,
})
def content_fossology_license_add(self, licenses, conflict_update=False):
return self.post('content/fossology_license/add', {
'licenses': licenses,
'conflict_update': conflict_update,
})
def content_fossology_license_missing(self, licenses):
return self.post('content/fossology_license/missing', {
'licenses': licenses})
def content_fossology_license_get(self, ids):
return self.post('content/fossology_license', {'ids': ids})
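The hunk above extends RemoteStorage.origin_visit_get with last_visit and
limit parameters for paginated listing. A minimal usage sketch (not part of
the patch; the URL, origin id, and page size are illustrative, and it
assumes the server returns a list of visit dicts carrying a 'visit' key):

    from swh.storage.api.client import RemoteStorage

    storage = RemoteStorage('http://localhost:5002/')
    # first page of at most 10 visits for origin 42
    page = storage.origin_visit_get(42, limit=10)
    # next page: only visits with an id greater than the last one seen
    if page:
        next_page = storage.origin_visit_get(
            42, last_visit=page[-1]['visit'], limit=10)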
diff --git a/swh/storage/db.py b/swh/storage/db.py
index 8baa7f3..73c2214 100644
--- a/swh/storage/db.py
+++ b/swh/storage/db.py
@@ -1,957 +1,966 @@
# Copyright (C) 2015-2016 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information
import binascii
import datetime
import functools
import json
import psycopg2
import psycopg2.extras
import select
import tempfile
from contextlib import contextmanager
from swh.core import hashutil
TMP_CONTENT_TABLE = 'tmp_content'
psycopg2.extras.register_uuid()
def stored_procedure(stored_proc):
"""decorator to execute remote stored procedure, specified as argument
Generally, the body of the decorated function should be empty. If it is
not, the stored procedure will be executed first; the function body then.
"""
def wrap(meth):
@functools.wraps(meth)
def _meth(self, *args, **kwargs):
cur = kwargs.get('cur', None)
self._cursor(cur).execute('SELECT %s()' % stored_proc)
meth(self, *args, **kwargs)
return _meth
return wrap
def jsonize(value):
"""Convert a value to a psycopg2 JSON object if necessary"""
if isinstance(value, dict):
return psycopg2.extras.Json(value)
return value
def entry_to_bytes(entry):
"""Convert an entry coming from the database to bytes"""
if isinstance(entry, memoryview):
return entry.tobytes()
if isinstance(entry, list):
return [entry_to_bytes(value) for value in entry]
return entry
def line_to_bytes(line):
"""Convert a line coming from the database to bytes"""
if not line:
return line
if isinstance(line, dict):
return {k: entry_to_bytes(v) for k, v in line.items()}
return line.__class__(entry_to_bytes(entry) for entry in line)
def cursor_to_bytes(cursor):
"""Yield all the data from a cursor as bytes"""
yield from (line_to_bytes(line) for line in cursor)
class BaseDb:
"""Base class for swh.storage.*Db.
cf. swh.storage.db.Db, swh.storage.archiver.db.ArchiverDb
"""
@classmethod
def connect(cls, *args, **kwargs):
"""factory method to create a DB proxy
Accepts all arguments of psycopg2.connect; only some specific
possibilities are reported below.
Args:
connstring: libpq2 connection string
"""
conn = psycopg2.connect(*args, **kwargs)
return cls(conn)
def _cursor(self, cur_arg):
"""get a cursor: from cur_arg if given, or a fresh one otherwise
meant to avoid boilerplate if/then/else in methods that proxy stored
procedures
"""
if cur_arg is not None:
return cur_arg
# elif self.cur is not None:
# return self.cur
else:
return self.conn.cursor()
def __init__(self, conn):
"""create a DB proxy
Args:
conn: psycopg2 connection to the SWH DB
"""
self.conn = conn
@contextmanager
def transaction(self):
"""context manager to execute within a DB transaction
Yields:
a psycopg2 cursor
"""
with self.conn.cursor() as cur:
try:
yield cur
self.conn.commit()
except:
if not self.conn.closed:
self.conn.rollback()
raise
def copy_to(self, items, tblname, columns, cur=None, item_cb=None):
"""Copy items' entries to table tblname with columns information.
Args:
items (iterable): iterable of dicts with the data to copy into tblname
tblname (str): Destination table's name
columns ([str]): keys to access data in items and also the
column names in the destination table.
item_cb (fn): optional function applied to each of the items
"""
def escape(data):
if data is None:
return ''
if isinstance(data, bytes):
return '\\x%s' % binascii.hexlify(data).decode('ascii')
elif isinstance(data, str):
return '"%s"' % data.replace('"', '""')
elif isinstance(data, datetime.datetime):
# We escape twice to make sure the string generated by
# isoformat gets escaped
return escape(data.isoformat())
elif isinstance(data, dict):
return escape(json.dumps(data))
elif isinstance(data, list):
return escape("{%s}" % ','.join(escape(d) for d in data))
elif isinstance(data, psycopg2.extras.Range):
# We escape twice here too, so that we make sure
# everything gets passed to copy properly
return escape(
'%s%s,%s%s' % (
'[' if data.lower_inc else '(',
'-infinity' if data.lower_inf else escape(data.lower),
'infinity' if data.upper_inf else escape(data.upper),
']' if data.upper_inc else ')',
)
)
else:
# We don't escape here to make sure we pass literals properly
return str(data)
with tempfile.TemporaryFile('w+') as f:
for d in items:
if item_cb is not None:
item_cb(d)
line = [escape(d.get(k)) for k in columns]
f.write(','.join(line))
f.write('\n')
f.seek(0)
self._cursor(cur).copy_expert('COPY %s (%s) FROM STDIN CSV' % (
tblname, ', '.join(columns)), f)
class Db(BaseDb):
"""Proxy to the SWH DB, with wrappers around stored procedures
"""
def mktemp(self, tblname, cur=None):
self._cursor(cur).execute('SELECT swh_mktemp(%s)', (tblname,))
def mktemp_dir_entry(self, entry_type, cur=None):
self._cursor(cur).execute('SELECT swh_mktemp_dir_entry(%s)',
(('directory_entry_%s' % entry_type),))
@stored_procedure('swh_mktemp_revision')
def mktemp_revision(self, cur=None): pass
@stored_procedure('swh_mktemp_release')
def mktemp_release(self, cur=None): pass
@stored_procedure('swh_mktemp_occurrence_history')
def mktemp_occurrence_history(self, cur=None): pass
@stored_procedure('swh_mktemp_entity_lister')
def mktemp_entity_lister(self, cur=None): pass
@stored_procedure('swh_mktemp_entity_history')
def mktemp_entity_history(self, cur=None): pass
@stored_procedure('swh_mktemp_bytea')
def mktemp_bytea(self, cur=None): pass
@stored_procedure('swh_mktemp_content_ctags')
def mktemp_content_ctags(self, cur=None): pass
@stored_procedure('swh_mktemp_content_ctags_missing')
def mktemp_content_ctags_missing(self, cur=None): pass
def register_listener(self, notify_queue, cur=None):
"""Register a listener for NOTIFY queue `notify_queue`"""
self._cursor(cur).execute("LISTEN %s" % notify_queue)
def listen_notifies(self, timeout):
"""Listen to notifications for `timeout` seconds"""
if select.select([self.conn], [], [], timeout) == ([], [], []):
return
else:
self.conn.poll()
while self.conn.notifies:
yield self.conn.notifies.pop(0)
@stored_procedure('swh_content_add')
def content_add_from_temp(self, cur=None): pass
@stored_procedure('swh_directory_add')
def directory_add_from_temp(self, cur=None): pass
@stored_procedure('swh_skipped_content_add')
def skipped_content_add_from_temp(self, cur=None): pass
@stored_procedure('swh_revision_add')
def revision_add_from_temp(self, cur=None): pass
@stored_procedure('swh_release_add')
def release_add_from_temp(self, cur=None): pass
@stored_procedure('swh_occurrence_history_add')
def occurrence_history_add_from_temp(self, cur=None): pass
@stored_procedure('swh_entity_history_add')
def entity_history_add_from_temp(self, cur=None): pass
@stored_procedure('swh_cache_content_revision_add')
def cache_content_revision_add(self, cur=None): pass
def store_tmp_bytea(self, ids, cur=None):
"""Store the given identifiers in a new tmp_bytea table"""
cur = self._cursor(cur)
self.mktemp_bytea(cur)
self.copy_to(({'id': elem} for elem in ids), 'tmp_bytea',
['id'], cur)
content_get_metadata_keys = ['sha1', 'sha1_git', 'sha256', 'length',
'status']
def content_get_metadata_from_temp(self, cur=None):
cur = self._cursor(cur)
cur.execute("""select t.id as sha1, %s from tmp_bytea t
left join content on t.id = content.sha1
""" % ', '.join(self.content_get_metadata_keys[1:]))
yield from cursor_to_bytes(cur)
def content_missing_from_temp(self, cur=None):
cur = self._cursor(cur)
cur.execute("""SELECT sha1, sha1_git, sha256
FROM swh_content_missing()""")
yield from cursor_to_bytes(cur)
def content_missing_per_sha1_from_temp(self, cur=None):
cur = self._cursor(cur)
cur.execute("""SELECT *
FROM swh_content_missing_per_sha1()""")
yield from cursor_to_bytes(cur)
def skipped_content_missing_from_temp(self, cur=None):
cur = self._cursor(cur)
cur.execute("""SELECT sha1, sha1_git, sha256
FROM swh_skipped_content_missing()""")
yield from cursor_to_bytes(cur)
def occurrence_get(self, origin_id, cur=None):
"""Retrieve latest occurrence's information by origin_id.
"""
cur = self._cursor(cur)
cur.execute("""SELECT origin, branch, target, target_type,
(select max(date) from origin_visit
where origin=%s) as date
FROM occurrence
WHERE origin=%s
""",
(origin_id, origin_id))
yield from cursor_to_bytes(cur)
def content_find(self, sha1=None, sha1_git=None, sha256=None, cur=None):
"""Find the content optionally on a combination of the following
checksums sha1, sha1_git or sha256.
Args:
sha1: sha1 content
git_sha1: the sha1 computed `a la git` sha1 of the content
sha256: sha256 content
Returns:
The triplet (sha1, sha1_git, sha256) if found or None.
"""
cur = self._cursor(cur)
cur.execute("""SELECT sha1, sha1_git, sha256, length, ctime, status
FROM swh_content_find(%s, %s, %s)
LIMIT 1""", (sha1, sha1_git, sha256))
content = line_to_bytes(cur.fetchone())
if set(content) == {None}:
return None
else:
return content
provenance_cols = ['content', 'revision', 'origin', 'visit', 'path']
def content_find_provenance(self, sha1_git, cur=None):
"""Find content's provenance information
Args:
sha1_git: the content's sha1_git
cur: cursor to use
Returns:
Provenance information on such content
"""
cur = self._cursor(cur)
cur.execute("""SELECT content, revision, origin, visit, path
FROM swh_content_find_provenance(%s)""",
(sha1_git, ))
yield from cursor_to_bytes(cur)
def directory_get_from_temp(self, cur=None):
cur = self._cursor(cur)
cur.execute('''SELECT id, file_entries, dir_entries, rev_entries
FROM swh_directory_get()''')
yield from cursor_to_bytes(cur)
def directory_missing_from_temp(self, cur=None):
cur = self._cursor(cur)
cur.execute('SELECT * FROM swh_directory_missing()')
yield from cursor_to_bytes(cur)
directory_ls_cols = ['dir_id', 'type', 'target', 'name', 'perms',
'status', 'sha1', 'sha1_git', 'sha256']
def directory_walk_one(self, directory, cur=None):
cur = self._cursor(cur)
cur.execute('SELECT * FROM swh_directory_walk_one(%s)', (directory,))
yield from cursor_to_bytes(cur)
def directory_walk(self, directory, cur=None):
cur = self._cursor(cur)
cur.execute('SELECT * FROM swh_directory_walk(%s)', (directory,))
yield from cursor_to_bytes(cur)
def revision_missing_from_temp(self, cur=None):
cur = self._cursor(cur)
cur.execute('SELECT id FROM swh_revision_missing() as r(id)')
yield from cursor_to_bytes(cur)
revision_add_cols = [
'id', 'date', 'date_offset', 'date_neg_utc_offset', 'committer_date',
'committer_date_offset', 'committer_date_neg_utc_offset', 'type',
'directory', 'message', 'author_fullname', 'author_name',
'author_email', 'committer_fullname', 'committer_name',
'committer_email', 'metadata', 'synthetic',
]
revision_get_cols = revision_add_cols + [
'author_id', 'committer_id', 'parents']
def origin_visit_add(self, origin, ts, cur=None):
"""Add a new origin_visit for origin origin at timestamp ts with
status 'ongoing'.
Args:
origin: origin concerned by the visit
ts: the date of the visit
Returns:
The new visit index step for that origin
"""
cur = self._cursor(cur)
self._cursor(cur).execute('SELECT swh_origin_visit_add(%s, %s)',
(origin, ts))
return cur.fetchone()[0]
def origin_visit_update(self, origin, visit_id, status,
metadata, cur=None):
"""Update origin_visit's status."""
cur = self._cursor(cur)
update = """UPDATE origin_visit
SET status=%s, metadata=%s
WHERE origin=%s AND visit=%s"""
cur.execute(update, (status, jsonize(metadata), origin, visit_id))
origin_visit_get_cols = ['origin', 'visit', 'date', 'status', 'metadata']
- def origin_visit_get_all(self, origin_id, cur=None):
+ def origin_visit_get_all(self, origin_id,
+ last_visit=None, limit=None, cur=None):
"""Retrieve all visits for origin with id origin_id.
Args:
origin_id: The visited origin's identifier
+ last_visit: list only visits whose id is greater than this one.
+ Defaults to None.
+ limit: maximum number of visits to yield. Defaults to None.
Yields:
The origin's visit history
"""
cur = self._cursor(cur)
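+ # note: last_visit and limit are interpolated directly into the
+ # SQL string below; both are expected to be integer values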
+ query_suffix = ''
+ if last_visit:
+ query_suffix += ' AND %s < visit' % last_visit
+
+ if limit:
+ query_suffix += ' LIMIT %s' % limit
+
query = """\
SELECT %s
FROM origin_visit
- WHERE origin=%%s""" % (', '.join(self.origin_visit_get_cols))
+ WHERE origin=%%s %s""" % (
+ ', '.join(self.origin_visit_get_cols), query_suffix)
cur.execute(query, (origin_id, ))
yield from cursor_to_bytes(cur)
def origin_visit_get(self, origin_id, visit_id, cur=None):
"""Retrieve information on visit visit_id of origin origin_id.
Args:
origin_id: the origin concerned
visit_id: The visit step for that origin
Returns:
The origin_visit information
"""
cur = self._cursor(cur)
query = """\
SELECT %s
FROM origin_visit
WHERE origin = %%s AND visit = %%s
""" % (', '.join(self.origin_visit_get_cols))
cur.execute(query, (origin_id, visit_id))
r = cur.fetchall()
if not r:
return None
return line_to_bytes(r[0])
occurrence_cols = ['origin', 'branch', 'target', 'target_type']
def occurrence_by_origin_visit(self, origin_id, visit_id, cur=None):
"""Retrieve all occurrences for a particular origin_visit.
Args:
origin_id: the origin concerned
visit_id: The visit step for that origin
Yields:
The occurrences for that origin visit
"""
cur = self._cursor(cur)
query = """\
SELECT %s
FROM swh_occurrence_by_origin_visit(%%s, %%s)
""" % (', '.join(self.occurrence_cols))
cur.execute(query, (origin_id, visit_id))
yield from cursor_to_bytes(cur)
def revision_get_from_temp(self, cur=None):
cur = self._cursor(cur)
query = 'SELECT %s FROM swh_revision_get()' % (
', '.join(self.revision_get_cols))
cur.execute(query)
yield from cursor_to_bytes(cur)
def revision_log(self, root_revisions, limit=None, cur=None):
cur = self._cursor(cur)
query = """SELECT %s
FROM swh_revision_log(%%s, %%s)
""" % ', '.join(self.revision_get_cols)
cur.execute(query, (root_revisions, limit))
yield from cursor_to_bytes(cur)
revision_shortlog_cols = ['id', 'parents']
def revision_shortlog(self, root_revisions, limit=None, cur=None):
cur = self._cursor(cur)
query = """SELECT %s
FROM swh_revision_list(%%s, %%s)
""" % ', '.join(self.revision_shortlog_cols)
cur.execute(query, (root_revisions, limit))
yield from cursor_to_bytes(cur)
cache_content_get_cols = [
'sha1', 'sha1_git', 'sha256', 'revision_paths']
def cache_content_get_all(self, cur=None):
"""Retrieve cache contents' sha1, sha256, sha1_git
"""
cur = self._cursor(cur)
cur.execute('SELECT * FROM swh_cache_content_get_all()')
yield from cursor_to_bytes(cur)
def cache_content_get(self, sha1_git, cur=None):
"""Retrieve cache content information sh.
"""
cur = self._cursor(cur)
cur.execute('SELECT * FROM swh_cache_content_get(%s)', (sha1_git, ))
data = cur.fetchone()
if data:
return line_to_bytes(data)
return None
def cache_revision_origin_add(self, origin, visit, cur=None):
"""Populate the content provenance information cache for the given
(origin, visit) couple."""
cur = self._cursor(cur)
cur.execute('SELECT * FROM swh_cache_revision_origin_add(%s, %s)',
(origin, visit))
yield from cursor_to_bytes(cur)
def release_missing_from_temp(self, cur=None):
cur = self._cursor(cur)
cur.execute('SELECT id FROM swh_release_missing() as r(id)')
yield from cursor_to_bytes(cur)
object_find_by_sha1_git_cols = ['sha1_git', 'type', 'id', 'object_id']
def object_find_by_sha1_git(self, ids, cur=None):
cur = self._cursor(cur)
self.store_tmp_bytea(ids, cur)
query = 'select %s from swh_object_find_by_sha1_git()' % (
', '.join(self.object_find_by_sha1_git_cols)
)
cur.execute(query)
yield from cursor_to_bytes(cur)
def stat_counters(self, cur=None):
cur = self._cursor(cur)
cur.execute('SELECT * FROM swh_stat_counters()')
yield from cur
fetch_history_cols = ['origin', 'date', 'status', 'result', 'stdout',
'stderr', 'duration']
def create_fetch_history(self, fetch_history, cur=None):
"""Create a fetch_history entry with the data in fetch_history"""
cur = self._cursor(cur)
query = '''INSERT INTO fetch_history (%s)
VALUES (%s) RETURNING id''' % (
','.join(self.fetch_history_cols),
','.join(['%s'] * len(self.fetch_history_cols))
)
cur.execute(query, [fetch_history.get(col) for col in
self.fetch_history_cols])
return cur.fetchone()[0]
def get_fetch_history(self, fetch_history_id, cur=None):
"""Get a fetch_history entry with the given id"""
cur = self._cursor(cur)
query = '''SELECT %s FROM fetch_history WHERE id=%%s''' % (
', '.join(self.fetch_history_cols),
)
cur.execute(query, (fetch_history_id,))
data = cur.fetchone()
if not data:
return None
ret = {'id': fetch_history_id}
for i, col in enumerate(self.fetch_history_cols):
ret[col] = data[i]
return ret
def update_fetch_history(self, fetch_history, cur=None):
"""Update the fetch_history entry from the data in fetch_history"""
cur = self._cursor(cur)
query = '''UPDATE fetch_history
SET %s
WHERE id=%%s''' % (
','.join('%s=%%s' % col for col in self.fetch_history_cols)
)
cur.execute(query, [jsonize(fetch_history.get(col)) for col in
self.fetch_history_cols + ['id']])
base_entity_cols = ['uuid', 'parent', 'name', 'type',
'description', 'homepage', 'active',
'generated', 'lister_metadata',
'metadata']
entity_cols = base_entity_cols + ['last_seen', 'last_id']
entity_history_cols = base_entity_cols + ['id', 'validity']
def origin_add(self, type, url, cur=None):
"""Insert a new origin and return the new identifier."""
insert = """INSERT INTO origin (type, url) values (%s, %s)
RETURNING id"""
cur.execute(insert, (type, url))
return cur.fetchone()[0]
def origin_get_with(self, type, url, cur=None):
"""Retrieve the origin id from its type and url if found."""
cur = self._cursor(cur)
query = """SELECT id, type, url, lister, project
FROM origin
WHERE type=%s AND url=%s"""
cur.execute(query, (type, url))
data = cur.fetchone()
if data:
return line_to_bytes(data)
return None
def origin_get(self, id, cur=None):
"""Retrieve the origin per its identifier.
"""
cur = self._cursor(cur)
query = "SELECT id, type, url, lister, project FROM origin WHERE id=%s"
cur.execute(query, (id,))
data = cur.fetchone()
if data:
return line_to_bytes(data)
return None
person_cols = ['fullname', 'name', 'email']
person_get_cols = person_cols + ['id']
def person_add(self, person, cur=None):
"""Add a person identified by its name and email.
Returns:
The new person's id
"""
cur = self._cursor(cur)
query_new_person = '''\
INSERT INTO person(%s)
VALUES (%s)
RETURNING id''' % (
', '.join(self.person_cols),
', '.join('%s' for i in range(len(self.person_cols)))
)
cur.execute(query_new_person,
[person[col] for col in self.person_cols])
return cur.fetchone()[0]
def person_get(self, ids, cur=None):
"""Retrieve the persons identified by the list of ids.
"""
cur = self._cursor(cur)
query = """SELECT %s
FROM person
WHERE id IN %%s""" % ', '.join(self.person_get_cols)
cur.execute(query, (tuple(ids),))
yield from cursor_to_bytes(cur)
release_add_cols = [
'id', 'target', 'target_type', 'date', 'date_offset',
'date_neg_utc_offset', 'name', 'comment', 'synthetic',
'author_fullname', 'author_name', 'author_email',
]
release_get_cols = release_add_cols + ['author_id']
def release_get_from_temp(self, cur=None):
cur = self._cursor(cur)
query = '''
SELECT %s
FROM swh_release_get()
''' % ', '.join(self.release_get_cols)
cur.execute(query)
yield from cursor_to_bytes(cur)
def release_get_by(self,
origin_id,
limit=None,
cur=None):
"""Retrieve a release by occurrence criterion (only origin right now)
Args:
- origin_id: The origin to look for.
"""
cur = self._cursor(cur)
query = """
SELECT %s
FROM swh_release_get_by(%%s)
LIMIT %%s
""" % ', '.join(self.release_get_cols)
cur.execute(query, (origin_id, limit))
yield from cursor_to_bytes(cur)
def revision_get_by(self,
origin_id,
branch_name,
datetime,
limit=None,
cur=None):
"""Retrieve a revision by occurrence criterion.
Args:
- origin_id: The origin to look for
- branch_name: the branch name to look for
- datetime: the lower bound of the time range to look for,
the upper bound being now.
- limit: limit the number of results to return
"""
cur = self._cursor(cur)
if branch_name and isinstance(branch_name, str):
branch_name = branch_name.encode('utf-8')
query = '''
SELECT %s
FROM swh_revision_get_by(%%s, %%s, %%s)
LIMIT %%s
''' % ', '.join(self.revision_get_cols)
cur.execute(query, (origin_id, branch_name, datetime, limit))
yield from cursor_to_bytes(cur)
def directory_entry_get_by_path(self, directory, paths, cur=None):
"""Retrieve a directory entry by path.
"""
cur = self._cursor(cur)
cur.execute("""SELECT dir_id, type, target, name, perms, status, sha1,
sha1_git, sha256
FROM swh_find_directory_entry_by_path(%s, %s)""",
(directory, paths))
data = cur.fetchone()
if set(data) == {None}:
return None
return line_to_bytes(data)
def entity_get(self, uuid, cur=None):
"""Retrieve the entity and its parent hierarchy chain per uuid.
"""
cur = self._cursor(cur)
cur.execute("""SELECT %s
FROM swh_entity_get(%%s)""" % (
', '.join(self.entity_cols)),
(uuid, ))
yield from cursor_to_bytes(cur)
def entity_get_one(self, uuid, cur=None):
"""Retrieve a single entity given its uuid.
"""
cur = self._cursor(cur)
cur.execute("""SELECT %s
FROM entity
WHERE uuid = %%s""" % (
', '.join(self.entity_cols)),
(uuid, ))
data = cur.fetchone()
if not data:
return None
return line_to_bytes(data)
content_mimetype_cols = ['id', 'mimetype', 'encoding',
'tool_name', 'tool_version']
@stored_procedure('swh_mktemp_content_mimetype_missing')
def mktemp_content_mimetype_missing(self, cur=None): pass
def content_mimetype_missing_from_temp(self, cur=None):
"""List missing mimetypes.
"""
cur = self._cursor(cur)
cur.execute("SELECT * FROM swh_content_mimetype_missing()")
yield from cursor_to_bytes(cur)
@stored_procedure('swh_mktemp_content_mimetype')
def mktemp_content_mimetype(self, cur=None): pass
def content_mimetype_add_from_temp(self, conflict_update, cur=None):
self._cursor(cur).execute("SELECT swh_content_mimetype_add(%s)",
(conflict_update, ))
content_language_cols = ['id', 'lang', 'tool_name', 'tool_version']
@stored_procedure('swh_mktemp_content_language')
def mktemp_content_language(self, cur=None): pass
def content_mimetype_get_from_temp(self, cur=None):
cur = self._cursor(cur)
query = "SELECT %s FROM swh_content_mimetype_get()" % (
','.join(self.content_mimetype_cols))
cur.execute(query)
yield from cursor_to_bytes(cur)
@stored_procedure('swh_mktemp_content_language_missing')
def mktemp_content_language_missing(self, cur=None): pass
def content_language_missing_from_temp(self, cur=None):
"""List missing languages.
"""
cur = self._cursor(cur)
cur.execute("SELECT * FROM swh_content_language_missing()")
yield from cursor_to_bytes(cur)
def content_language_add_from_temp(self, conflict_update, cur=None):
self._cursor(cur).execute("SELECT swh_content_language_add(%s)",
(conflict_update, ))
def content_language_get_from_temp(self, cur=None):
cur = self._cursor(cur)
query = "SELECT %s FROM swh_content_language_get()" % (
','.join(self.content_language_cols))
cur.execute(query)
yield from cursor_to_bytes(cur)
def content_ctags_missing_from_temp(self, cur=None):
"""List missing ctags.
"""
cur = self._cursor(cur)
cur.execute("SELECT * FROM swh_content_ctags_missing()")
yield from cursor_to_bytes(cur)
def content_ctags_add_from_temp(self, conflict_update, cur=None):
self._cursor(cur).execute("SELECT swh_content_ctags_add(%s)",
(conflict_update, ))
content_ctags_cols = ['id', 'name', 'kind', 'line', 'lang',
'tool_name', 'tool_version']
def content_ctags_get_from_temp(self, cur=None):
cur = self._cursor(cur)
query = "SELECT %s FROM swh_content_ctags_get()" % (
','.join(self.content_ctags_cols))
cur.execute(query)
yield from cursor_to_bytes(cur)
def content_ctags_search(self, expression, last_sha1, limit, cur=None):
cur = self._cursor(cur)
if not last_sha1:
query = """SELECT %s
FROM swh_content_ctags_search(%%s, %%s)""" % (
','.join(self.content_ctags_cols))
cur.execute(query, (expression, limit))
else:
if last_sha1 and isinstance(last_sha1, bytes):
last_sha1 = '\\x%s' % hashutil.hash_to_hex(last_sha1)
elif last_sha1:
last_sha1 = '\\x%s' % last_sha1
query = """SELECT %s
FROM swh_content_ctags_search(%%s, %%s, %%s)""" % (
','.join(self.content_ctags_cols))
cur.execute(query, (expression, limit, last_sha1))
yield from cursor_to_bytes(cur)
content_fossology_license_cols = ['id', 'tool_name', 'tool_version',
'licenses']
@stored_procedure('swh_mktemp_content_fossology_license_missing')
def mktemp_content_fossology_license_missing(self, cur=None): pass
def content_fossology_license_missing_from_temp(self, cur=None):
"""List missing licenses.
"""
cur = self._cursor(cur)
cur.execute("SELECT * FROM swh_content_fossology_license_missing()")
yield from cursor_to_bytes(cur)
@stored_procedure('swh_mktemp_content_fossology_license')
def mktemp_content_fossology_license(self, cur=None): pass
@stored_procedure('swh_mktemp_content_fossology_license_unknown')
def mktemp_content_fossology_license_unknown(self, cur=None): pass
def content_fossology_license_add_from_temp(self, conflict_update,
cur=None):
"""Add new licenses per content.
"""
self._cursor(cur).execute(
"SELECT swh_content_fossology_license_add(%s)",
(conflict_update, ))
def content_fossology_license_get_from_temp(self, cur=None):
"""Retrieve licenses per content.
"""
cur = self._cursor(cur)
query = "SELECT %s FROM swh_content_fossology_license_get()" % (
','.join(self.content_fossology_license_cols))
cur.execute(query)
yield from cursor_to_bytes(cur)
def content_fossology_license_unknown(self, cur=None):
"""Returns the unknown licenses from
tmp_content_fossology_license_unknown.
"""
cur = self._cursor(cur)
cur.execute("SELECT * FROM swh_content_fossology_license_unknown()")
yield from cursor_to_bytes(cur)
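To make the new filtering concrete, here is a standalone sketch (not part
of the patch) of the SQL string the updated Db.origin_visit_get_all builds,
using illustrative values last_visit=42 and limit=10 and the column list
from origin_visit_get_cols above:

    cols = ['origin', 'visit', 'date', 'status', 'metadata']
    query_suffix = ' AND %s < visit' % 42
    query_suffix += ' LIMIT %s' % 10
    query = """\
    SELECT %s
    FROM origin_visit
    WHERE origin=%%s %s""" % (', '.join(cols), query_suffix)
    print(query)
    # SELECT origin, visit, date, status, metadata
    # FROM origin_visit
    # WHERE origin=%s  AND 42 < visit LIMIT 10

The remaining %s placeholder is filled by psycopg2 with origin_id when the
query is executed.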
diff --git a/swh/storage/storage.py b/swh/storage/storage.py
index 2d8e267..8891868 100644
--- a/swh/storage/storage.py
+++ b/swh/storage/storage.py
@@ -1,1569 +1,1574 @@
# Copyright (C) 2015-2016 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information
from collections import defaultdict
import datetime
import itertools
import dateutil.parser
import psycopg2
from . import converters
from .common import db_transaction_generator, db_transaction
from .db import Db
from .exc import StorageDBError
from swh.core.hashutil import ALGORITHMS
from swh.objstorage import get_objstorage
from swh.objstorage.exc import ObjNotFoundError
# Max block size of contents to return
BULK_BLOCK_CONTENT_LEN_MAX = 10000
class Storage():
"""SWH storage proxy, encompassing DB and object storage
"""
def __init__(self, db, objstorage):
"""
Args:
db: either a libpq connection string, or a psycopg2 connection
objstorage: configuration dict to pass to swh.objstorage.get_objstorage
"""
try:
if isinstance(db, psycopg2.extensions.connection):
self.db = Db(db)
else:
self.db = Db.connect(db)
except psycopg2.OperationalError as e:
raise StorageDBError(e)
self.objstorage = get_objstorage(**objstorage)
def check_config(self, *, check_write):
"""Check that the storage is configured and ready to go."""
if not self.objstorage.check_config(check_write=check_write):
return False
# Check permissions on one of the tables
with self.db.transaction() as cur:
if check_write:
check = 'INSERT'
else:
check = 'SELECT'
cur.execute(
"select has_table_privilege(current_user, 'content', %s)",
(check,)
)
return cur.fetchone()[0]
return True
def content_add(self, content):
"""Add content blobs to the storage
Note: in case of DB errors, objects might have already been added to
the object storage and will not be removed. Since addition to the
object storage is idempotent, that should not be a problem.
Args:
content: iterable of dictionaries representing individual pieces of
content to add. Each dictionary has the following keys:
- data (bytes): the actual content
- length (int): content length (default: -1)
- one key for each checksum algorithm in
swh.core.hashutil.ALGORITHMS, mapped to the corresponding
checksum
- status (str): one of visible, hidden, absent
- reason (str): if status = absent, the reason why
- origin (int): if status = absent, the origin we saw the
content in
"""
db = self.db
content_by_status = defaultdict(list)
for d in content:
if 'status' not in d:
d['status'] = 'visible'
if 'length' not in d:
d['length'] = -1
content_by_status[d['status']].append(d)
content_with_data = content_by_status['visible']
content_without_data = content_by_status['absent']
missing_content = set(self.content_missing(content_with_data))
missing_skipped = set(
sha1_git for sha1, sha1_git, sha256
in self.skipped_content_missing(content_without_data))
with db.transaction() as cur:
if missing_content:
# create temporary table for metadata injection
db.mktemp('content', cur)
def add_to_objstorage(cont):
self.objstorage.add(cont['data'],
obj_id=cont['sha1'])
content_filtered = (cont for cont in content_with_data
if cont['sha1'] in missing_content)
db.copy_to(content_filtered, 'tmp_content',
['sha1', 'sha1_git', 'sha256', 'length', 'status'],
cur, item_cb=add_to_objstorage)
# move metadata in place
db.content_add_from_temp(cur)
if missing_skipped:
missing_filtered = (cont for cont in content_without_data
if cont['sha1_git'] in missing_skipped)
db.mktemp('skipped_content', cur)
db.copy_to(missing_filtered, 'tmp_skipped_content',
['sha1', 'sha1_git', 'sha256', 'length',
'reason', 'status', 'origin'], cur)
# move metadata in place
db.skipped_content_add_from_temp(cur)
def content_get(self, content):
"""Retrieve in bulk contents and their data.
Args:
content: an iterable of sha1s
Returns:
Generates contents as dicts with their raw data:
- sha1: the content's sha1
- data: the content's raw bytes
Raises:
ValueError if too many contents are requested.
cf. BULK_BLOCK_CONTENT_LEN_MAX
"""
# FIXME: Improve on server module to slice the result
if len(content) > BULK_BLOCK_CONTENT_LEN_MAX:
raise ValueError(
"Send at maximum %s contents." % BULK_BLOCK_CONTENT_LEN_MAX)
for obj_id in content:
try:
data = self.objstorage.get(obj_id)
except ObjNotFoundError:
yield None
continue
yield {'sha1': obj_id, 'data': data}
@db_transaction_generator
def content_get_metadata(self, content, cur=None):
"""Retrieve content metadata in bulk
Args:
content: iterable of content identifiers (sha1)
Returns:
an iterable with content metadata corresponding to the given ids
"""
db = self.db
db.store_tmp_bytea(content, cur)
for content_metadata in db.content_get_metadata_from_temp(cur):
yield dict(zip(db.content_get_metadata_keys, content_metadata))
@db_transaction_generator
def content_missing(self, content, key_hash='sha1', cur=None):
"""List content missing from storage
Args:
content: iterable of dictionaries containing one key for each
checksum algorithm in swh.core.hashutil.ALGORITHMS, mapped to
the corresponding checksum, and a length key mapped to the
content length.
key_hash: the name of the hash used as key (default: 'sha1')
Returns:
an iterable of `key_hash`es missing from the storage
Raises:
TODO: an exception when we get a hash collision.
"""
db = self.db
keys = ['sha1', 'sha1_git', 'sha256']
if key_hash not in keys:
raise ValueError("key_hash should be one of %s" % keys)
key_hash_idx = keys.index(key_hash)
# Create temporary table for metadata injection
db.mktemp('content', cur)
db.copy_to(content, 'tmp_content', keys + ['length'], cur)
for obj in db.content_missing_from_temp(cur):
yield obj[key_hash_idx]
@db_transaction_generator
def content_missing_per_sha1(self, contents, cur=None):
"""List content missing from storage based only on sha1.
Args:
contents: Iterable of sha1 to check for absence.
Returns:
an iterable of `sha1`s missing from the storage.
Raises:
TODO: an exception when we get a hash collision.
"""
db = self.db
db.store_tmp_bytea(contents, cur)
for obj in db.content_missing_per_sha1_from_temp(cur):
yield obj[0]
@db_transaction_generator
def skipped_content_missing(self, content, cur=None):
"""List skipped_content missing from storage
Args:
content: iterable of dictionaries containing the data for each
checksum algorithm.
Returns:
an iterable of signatures missing from the storage
"""
keys = ['sha1', 'sha1_git', 'sha256']
db = self.db
db.mktemp('skipped_content', cur)
db.copy_to(content, 'tmp_skipped_content',
keys + ['length', 'reason'], cur)
yield from db.skipped_content_missing_from_temp(cur)
@db_transaction
def content_find(self, content, cur=None):
"""Find a content hash in db.
Args:
content: a dictionary representing one content hash, mapping
checksum algorithm names (see swh.core.hashutil.ALGORITHMS) to
checksum values
Returns:
a dict with keys sha1, sha1_git, sha256, length, ctime and status
if the content exists, or None otherwise.
Raises:
ValueError in case the key of the dictionary is not sha1, sha1_git
nor sha256.
"""
db = self.db
if not set(content).intersection(ALGORITHMS):
raise ValueError('content keys must contain at least one of: '
'sha1, sha1_git, sha256')
c = db.content_find(sha1=content.get('sha1'),
sha1_git=content.get('sha1_git'),
sha256=content.get('sha256'),
cur=cur)
if c:
keys = ['sha1', 'sha1_git', 'sha256', 'length', 'ctime', 'status']
return dict(zip(keys, c))
return None
@db_transaction_generator
def content_find_provenance(self, content, cur=None):
"""Find content's provenance information.
Args:
content: a dictionary representing one content hash; its keys are
checksum algorithm names from swh.core.hashutil.ALGORITHMS, mapped
to the corresponding checksum values.
Yields:
The provenance information on content.
"""
db = self.db
c = self.content_find(content)
if not c:
return []
sha1_git = c['sha1_git']
for provenance in db.content_find_provenance(sha1_git, cur=cur):
yield dict(zip(db.provenance_cols, provenance))
def directory_add(self, directories):
"""Add directories to the storage
Args:
directories: iterable of dictionaries representing the individual
directories to add. Each dict has the following keys:
- id (sha1_git): the id of the directory to add
- entries (list): list of dicts for each entry in the
directory. Each dict has the following keys:
- name (bytes)
- type (one of 'file', 'dir', 'rev'):
type of the directory entry (file, directory, revision)
- target (sha1_git): id of the object pointed at by the
directory entry
- perms (int): entry permissions
"""
dirs = set()
dir_entries = {
'file': defaultdict(list),
'dir': defaultdict(list),
'rev': defaultdict(list),
}
for cur_dir in directories:
dir_id = cur_dir['id']
dirs.add(dir_id)
for src_entry in cur_dir['entries']:
entry = src_entry.copy()
entry['dir_id'] = dir_id
dir_entries[entry['type']][dir_id].append(entry)
dirs_missing = set(self.directory_missing(dirs))
if not dirs_missing:
return
db = self.db
with db.transaction() as cur:
# Copy directory ids
dirs_missing_dict = ({'id': dir} for dir in dirs_missing)
db.mktemp('directory', cur)
db.copy_to(dirs_missing_dict, 'tmp_directory', ['id'], cur)
# Copy entries
for entry_type, entry_list in dir_entries.items():
entries = itertools.chain.from_iterable(
entries_for_dir
for dir_id, entries_for_dir
in entry_list.items()
if dir_id in dirs_missing)
db.mktemp_dir_entry(entry_type)
db.copy_to(
entries,
'tmp_directory_entry_%s' % entry_type,
['target', 'name', 'perms', 'dir_id'],
cur,
)
# Do the final copy
db.directory_add_from_temp(cur)
@db_transaction_generator
def directory_missing(self, directories, cur):
"""List directories missing from storage
Args: an iterable of directory ids
Returns: a list of missing directory ids
"""
db = self.db
# Create temporary table for metadata injection
db.mktemp('directory', cur)
directories_dicts = ({'id': dir} for dir in directories)
db.copy_to(directories_dicts, 'tmp_directory', ['id'], cur)
for obj in db.directory_missing_from_temp(cur):
yield obj[0]
@db_transaction_generator
def directory_get(self,
directories,
cur=None):
"""Get information on directories.
Args:
- directories: an iterable of directory ids
Returns:
List of directories as dict with keys and associated values.
"""
db = self.db
keys = ('id', 'dir_entries', 'file_entries', 'rev_entries')
db.mktemp('directory', cur)
db.copy_to(({'id': dir_id} for dir_id in directories),
'tmp_directory', ['id'], cur)
dirs = db.directory_get_from_temp(cur)
for line in dirs:
yield dict(zip(keys, line))
@db_transaction_generator
def directory_ls(self, directory, recursive=False, cur=None):
"""Get entries for one directory.
Args:
- directory: the directory to list entries from.
- recursive: if set, list entries recursively from this directory.
Returns:
List of entries for such directory.
"""
db = self.db
if recursive:
res_gen = db.directory_walk(directory)
else:
res_gen = db.directory_walk_one(directory)
for line in res_gen:
yield dict(zip(db.directory_ls_cols, line))
@db_transaction
def cache_content_revision_add(self, revisions, cur=None):
"""Cache the current revision's current targeted arborescence directory.
If the revision has already been cached, it just does nothing.
Args:
- revisions: the revisions to cache
Returns:
None
"""
db = self.db
db.store_tmp_bytea(revisions, cur)
db.cache_content_revision_add()
@db_transaction_generator
def cache_content_get_all(self, cur=None):
"""Read the distinct contents in the cache table.
Yields:
contents from cache
"""
for content in self.db.cache_content_get_all(cur):
yield dict(zip(self.db.cache_content_get_cols, content))
@db_transaction
def cache_content_get(self, content, cur=None):
"""Retrieve information on content.
Args:
content (dict): content with checksums
Returns:
Its properties (sha1, sha1_git, sha256, revision_paths)
"""
if 'sha1_git' in content:
sha1_git = content['sha1_git']
else:
c = self.content_find(content)
if not c:
return None
sha1_git = c['sha1_git']
c = self.db.cache_content_get(sha1_git, cur=cur)
if not c:
return None
return dict(zip(self.db.cache_content_get_cols, c))
@db_transaction_generator
def cache_revision_origin_add(self, origin, visit, cur=None):
"""Cache the list of revisions the given visit added to the origin.
Args:
- origin: the id of the origin
- visit: the id of the visit
Returns:
The list of new revisions
"""
for (revision,) in self.db.cache_revision_origin_add(origin, visit):
yield revision
@db_transaction
def directory_entry_get_by_path(self, directory, paths, cur=None):
"""Get the directory entry (either file or dir) from directory with
path.
Args:
- directory: sha1 of the top level directory
- paths: path to lookup from the top level directory. From left
(top) to right (bottom).
Returns:
The corresponding directory entry if found, None otherwise.
"""
db = self.db
keys = ('dir_id', 'type', 'target', 'name', 'perms', 'status',
'sha1', 'sha1_git', 'sha256')
res = db.directory_entry_get_by_path(directory, paths, cur)
if res:
return dict(zip(keys, res))
def revision_add(self, revisions):
"""Add revisions to the storage
Args:
revisions: iterable of dictionaries representing the individual
revisions to add. Each dict has the following keys:
- id (sha1_git): id of the revision to add
- date (datetime.DateTime): date the revision was written
- date_offset (int): offset from UTC in minutes the revision
was written
- date_neg_utc_offset (boolean): whether a null date_offset
represents a negative UTC offset
- committer_date (datetime.DateTime): date the revision got
added to the origin
- committer_date_offset (int): offset from UTC in minutes the
revision was added to the origin
- committer_date_neg_utc_offset (boolean): whether a null
committer_date_offset represents a negative UTC offset
- type (one of 'git', 'tar'): type of the revision added
- directory (sha1_git): the directory the revision points at
- message (bytes): the message associated with the revision
- author_name (bytes): the name of the revision author
- author_email (bytes): the email of the revision author
- committer_name (bytes): the name of the revision committer
- committer_email (bytes): the email of the revision committer
- metadata (jsonb): extra information as dictionary
- synthetic (bool): revision's nature (tarball, directory
creates synthetic revision)
- parents (list of sha1_git): the parents of this revision
"""
db = self.db
revisions_missing = set(self.revision_missing(
set(revision['id'] for revision in revisions)))
if not revisions_missing:
return
with db.transaction() as cur:
db.mktemp_revision(cur)
revisions_filtered = (
converters.revision_to_db(revision) for revision in revisions
if revision['id'] in revisions_missing)
parents_filtered = []
db.copy_to(
revisions_filtered, 'tmp_revision', db.revision_add_cols,
cur,
lambda rev: parents_filtered.extend(rev['parents']))
db.revision_add_from_temp(cur)
db.copy_to(parents_filtered, 'revision_history',
['id', 'parent_id', 'parent_rank'], cur)
@db_transaction_generator
def revision_missing(self, revisions, cur=None):
"""List revisions missing from storage
Args: an iterable of revision ids
Returns: a list of missing revision ids
"""
db = self.db
db.store_tmp_bytea(revisions, cur)
for obj in db.revision_missing_from_temp(cur):
yield obj[0]
@db_transaction_generator
def revision_get(self, revisions, cur):
"""Get all revisions from storage
Args: an iterable of revision ids
Returns: an iterable of revisions as dictionaries
(or None if the revision doesn't exist)
"""
db = self.db
db.store_tmp_bytea(revisions, cur)
for line in self.db.revision_get_from_temp(cur):
data = converters.db_to_revision(
dict(zip(db.revision_get_cols, line))
)
if not data['type']:
yield None
continue
yield data
@db_transaction_generator
def revision_log(self, revisions, limit=None, cur=None):
"""Fetch revision entry from the given root revisions.
Args:
- revisions: array of root revision to lookup
- limit: limitation on the output result. Default to null.
Yields:
Revision log entries starting from the given root revisions.
"""
db = self.db
for line in db.revision_log(revisions, limit, cur):
data = converters.db_to_revision(
dict(zip(db.revision_get_cols, line))
)
if not data['type']:
yield None
continue
yield data
@db_transaction_generator
def revision_shortlog(self, revisions, limit=None, cur=None):
"""Fetch the shortlog for the given revisions
Args:
revisions: list of root revisions to lookup
limit: depth limitation for the output
Yields:
a list of (id, parents) tuples.
"""
db = self.db
yield from db.revision_shortlog(revisions, limit, cur)
@db_transaction_generator
def revision_log_by(self, origin_id, branch_name=None, timestamp=None,
limit=None, cur=None):
"""Fetch revision entry from the actual origin_id's latest revision.
Args:
- origin_id: the origin id from which deriving the revision
- branch_name: (optional) occurrence's branch name
- timestamp: (optional) occurrence's time
- limit: (optional) depth limitation for the
output. Default to None.
Yields:
The revision log starting from the revision derived from
the (origin, branch_name, timestamp) combination, if any.
Yields nothing if no revision matches this combination.
"""
db = self.db
# Retrieve the revision by criterion
revisions = list(db.revision_get_by(
origin_id, branch_name, timestamp, limit=1))
if not revisions:
return None
revision_id = revisions[0][0]
# otherwise, retrieve the revision log from that revision
yield from self.revision_log([revision_id], limit)
def release_add(self, releases):
"""Add releases to the storage
Args:
releases: iterable of dictionaries representing the individual
releases to add. Each dict has the following keys:
- id (sha1_git): id of the release to add
- revision (sha1_git): id of the revision the release points
to
- date (datetime.DateTime): the date the release was made
- date_offset (int): offset from UTC in minutes the release was
made
- date_neg_utc_offset (boolean): whether a null date_offset
represents a negative UTC offset
- name (bytes): the name of the release
- comment (bytes): the comment associated with the release
- author_name (bytes): the name of the release author
- author_email (bytes): the email of the release author
"""
db = self.db
release_ids = set(release['id'] for release in releases)
releases_missing = set(self.release_missing(release_ids))
if not releases_missing:
return
with db.transaction() as cur:
db.mktemp_release(cur)
releases_filtered = (
converters.release_to_db(release) for release in releases
if release['id'] in releases_missing
)
db.copy_to(releases_filtered, 'tmp_release', db.release_add_cols,
cur)
db.release_add_from_temp(cur)
@db_transaction_generator
def release_missing(self, releases, cur=None):
"""List releases missing from storage
Args: an iterable of release ids
Returns: a list of missing release ids
"""
db = self.db
# Create temporary table for metadata injection
db.store_tmp_bytea(releases, cur)
for obj in db.release_missing_from_temp(cur):
yield obj[0]
@db_transaction_generator
def release_get(self, releases, cur=None):
"""Given a list of sha1, return the releases's information
Args:
releases: list of sha1s
Returns:
Generates release dicts with the keys listed in
db.release_get_cols (id, target, target_type, date and offsets,
name, comment, synthetic, and author information).
"""
db = self.db
# Create temporary table for metadata injection
db.store_tmp_bytea(releases, cur)
for release in db.release_get_from_temp(cur):
yield converters.db_to_release(
dict(zip(db.release_get_cols, release))
)
@db_transaction
def occurrence_add(self, occurrences, cur=None):
"""Add occurrences to the storage
Args:
occurrences: iterable of dictionaries representing the individual
occurrences to add. Each dict has the following keys:
- origin (int): id of the origin corresponding to the
occurrence
- branch (str): the reference name of the occurrence
- target (sha1_git): the id of the object pointed to by
the occurrence
- target_type (str): the type of object pointed to by the
occurrence
"""
db = self.db
db.mktemp_occurrence_history(cur)
db.copy_to(occurrences, 'tmp_occurrence_history',
['origin', 'branch', 'target', 'target_type', 'visit'], cur)
db.occurrence_history_add_from_temp(cur)
@db_transaction_generator
def occurrence_get(self, origin_id, cur=None):
"""Retrieve occurrence information per origin_id.
Args:
origin_id: The occurrence's origin.
Yields:
The occurrences for that origin.
"""
db = self.db
for line in db.occurrence_get(origin_id, cur):
yield {
'origin': line[0],
'branch': line[1],
'target': line[2],
'target_type': line[3],
}
@db_transaction
def origin_visit_add(self, origin, ts, cur=None):
"""Add an origin_visit for the origin at ts with status 'ongoing'.
Args:
origin: Visited Origin id
ts: timestamp of such visit
Returns:
Dict with keys origin and visit where:
- origin: origin identifier
- visit: the visit identifier for the new visit occurrence
"""
if isinstance(ts, str):
ts = dateutil.parser.parse(ts)
return {
'origin': origin,
'visit': self.db.origin_visit_add(origin, ts, cur)
}
@db_transaction
def origin_visit_update(self, origin, visit_id, status, metadata=None,
cur=None):
"""Update an origin_visit's status.
Args:
origin: Visited Origin id
visit_id: Visit's id
status: Visit's new status
metadata: Data associated to the visit
Returns:
None
"""
return self.db.origin_visit_update(origin, visit_id, status, metadata,
cur)
@db_transaction_generator
- def origin_visit_get(self, origin, cur=None):
+ def origin_visit_get(self, origin, last_visit=None, limit=None, cur=None):
"""Retrieve all the origin's visit's information.
Args:
- origin: The occurrence's origin (identifier).
+ origin (int): The occurrence's origin (identifier).
+ last_visit (int): Starting point from which to list the next
+ visits. Defaults to None.
+ limit (int): Number of results to return from last_visit.
+ Defaults to None.
Yields:
List of visits.
"""
db = self.db
- for line in db.origin_visit_get_all(origin, cur):
+ for line in db.origin_visit_get_all(
+ origin, last_visit=last_visit, limit=limit, cur=cur):
data = dict(zip(self.db.origin_visit_get_cols, line))
yield data
@db_transaction
def origin_visit_get_by(self, origin, visit, cur=None):
"""Retrieve origin visit's information.
Args:
origin: The occurrence's origin (identifier).
visit: The visit identifier.
Returns:
The information on that particular (origin, visit)
"""
db = self.db
ori_visit = db.origin_visit_get(origin, visit, cur)
if not ori_visit:
return None
ori_visit = dict(zip(self.db.origin_visit_get_cols, ori_visit))
occs = {}
for occ in db.occurrence_by_origin_visit(origin, visit):
_, branch_name, target, target_type = occ
occs[branch_name] = {
'target': target,
'target_type': target_type
}
ori_visit.update({
'occurrences': occs
})
return ori_visit
@db_transaction_generator
def revision_get_by(self,
origin_id,
branch_name=None,
timestamp=None,
limit=None,
cur=None):
"""Given an origin_id, retrieve occurrences' list per given criterions.
Args:
origin_id: The origin to filter on.
branch_name: (optional) branch name.
timestamp: (optional) time.
limit: (optional) limit
Yields:
List of occurrences matching the criterions or None if nothing is
found.
"""
for line in self.db.revision_get_by(origin_id,
branch_name,
timestamp,
limit=limit,
cur=cur):
data = converters.db_to_revision(
dict(zip(self.db.revision_get_cols, line))
)
if not data['type']:
yield None
continue
yield data
def release_get_by(self, origin_id, limit=None):
"""Given an origin id, return all the tag objects pointing to heads of
origin_id.
Args:
origin_id: the origin to filter on.
limit: None by default
Yields:
List of releases matching the criterions or None if nothing is
found.
"""
for line in self.db.release_get_by(origin_id, limit=limit):
data = converters.db_to_release(
dict(zip(self.db.release_get_cols, line))
)
yield data
@db_transaction
def object_find_by_sha1_git(self, ids, cur=None):
"""Return the objects found with the given ids.
Args:
ids: a generator of sha1_gits
Returns:
a dict mapping the id to the list of objects found. Each object
found is itself a dict with keys:
sha1_git: the input id
type: the type of object found
id: the id of the object found
object_id: the numeric id of the object found.
"""
db = self.db
ids = list(ids)  # materialize so the ids can be iterated twice below
ret = {id: [] for id in ids}
for retval in db.object_find_by_sha1_git(ids):
if retval[1]:
ret[retval[0]].append(dict(zip(db.object_find_by_sha1_git_cols,
retval)))
return ret
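# Illustrative usage sketch (not part of the module; id1 and id2 are
# hypothetical sha1_git values); unknown ids map to an empty list:
#
#   found = storage.object_find_by_sha1_git([id1, id2])
#   # found == {id1: [{'sha1_git': id1, 'type': ..., 'id': ...,
#   #                  'object_id': ...}],
#   #           id2: []}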
@db_transaction
def origin_get(self, origin, cur=None):
"""Return the origin either identified by its id or its tuple
(type, url).
Args:
origin: dictionary representing the individual origin to find.
This dict has either the keys type and url:
- type (FIXME: enum TBD): the origin type ('git', 'wget', ...)
- url (bytes): the url the origin points to
or the key id:
- id: the origin id
Returns:
the origin dict with the keys:
- id: origin's id
- type: origin's type
- url: origin's url
- lister: lister's uuid
- project: project's uuid (FIXME, retrieve this information)
Raises:
ValueError: if the given keys match neither (type and url) nor id.
"""
db = self.db
keys = ['id', 'type', 'url', 'lister', 'project']
origin_id = origin.get('id')
if origin_id: # check lookup per id first
ori = db.origin_get(origin_id, cur)
elif 'type' in origin and 'url' in origin: # or lookup per type, url
ori = db.origin_get_with(origin['type'], origin['url'], cur)
else: # unsupported lookup
raise ValueError('Origin must have either id or (type and url).')
if ori:
return dict(zip(keys, ori))
return None
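# Illustrative sketch of the two supported lookups (not part of the
# module; the values are hypothetical):
#
#   storage.origin_get({'id': 42})
#   storage.origin_get({'type': 'git', 'url': 'file:///dev/null'})
#   storage.origin_get({'url': 'file:///dev/null'})  # raises ValueError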
@db_transaction
def _person_add(self, person, cur=None):
"""Add a person in storage.
BEWARE: Internal function for now.
Do not do anything fancy in case a person already exists.
Please adapt code if more checks are needed.
Args:
person dictionary with keys name and email.
Returns:
Id of the new person.
"""
db = self.db
return db.person_add(person)
@db_transaction_generator
def person_get(self, person, cur=None):
"""Return the persons identified by their ids.
Args:
person: array of ids.
Returns:
The array of persons corresponding of the ids.
"""
db = self.db
for person in db.person_get(person):
yield dict(zip(db.person_get_cols, person))
@db_transaction
def origin_add(self, origins, cur=None):
"""Add origins to the storage
Args:
origins: list of dictionaries representing the individual origins,
with the following keys:
type: the origin type ('git', 'svn', 'deb', ...)
url (bytes): the url the origin points to
Returns:
The array of ids corresponding to the given origins
"""
ret = []
for origin in origins:
ret.append(self.origin_add_one(origin, cur=cur))
return ret
@db_transaction
def origin_add_one(self, origin, cur=None):
"""Add origin to the storage
Args:
origin: dictionary representing the individual
origin to add. This dict has the following keys:
- type (FIXME: enum TBD): the origin type ('git', 'wget', ...)
- url (bytes): the url the origin points to
Returns:
the id of the added origin, or of the identical one that already
exists.
"""
db = self.db
data = db.origin_get_with(origin['type'], origin['url'], cur)
if data:
return data[0]
return db.origin_add(origin['type'], origin['url'], cur)
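# Illustrative sketch (not part of the module; `storage` and url are
# hypothetical): adding the same origin twice returns the same id, since
# an identical existing origin is reused:
#
#   id1 = storage.origin_add_one({'type': 'git', 'url': url})
#   id2 = storage.origin_add_one({'type': 'git', 'url': url})
#   assert id1 == id2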
@db_transaction
def fetch_history_start(self, origin_id, cur=None):
"""Add an entry for origin origin_id in fetch_history. Returns the id
of the added fetch_history entry
"""
fetch_history = {
'origin': origin_id,
'date': datetime.datetime.now(tz=datetime.timezone.utc),
}
return self.db.create_fetch_history(fetch_history, cur)
@db_transaction
def fetch_history_end(self, fetch_history_id, data, cur=None):
"""Close the fetch_history entry with id `fetch_history_id`, replacing
its data with `data`.
"""
now = datetime.datetime.now(tz=datetime.timezone.utc)
fetch_history = self.db.get_fetch_history(fetch_history_id, cur)
if not fetch_history:
raise ValueError('No fetch_history with id %d' % fetch_history_id)
fetch_history['duration'] = now - fetch_history['date']
fetch_history.update(data)
self.db.update_fetch_history(fetch_history, cur)
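# Illustrative sketch pairing the two calls around a fetch (not part of
# the module; `storage`, origin_id and the data payload are hypothetical):
#
#   fh_id = storage.fetch_history_start(origin_id)
#   # ... perform the fetch ...
#   storage.fetch_history_end(fh_id, {'status': True,
#                                     'result': {'foo': 'bar'}})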
@db_transaction
def fetch_history_get(self, fetch_history_id, cur=None):
"""Get the fetch_history entry with id `fetch_history_id`.
"""
return self.db.get_fetch_history(fetch_history_id, cur)
@db_transaction
def entity_add(self, entities, cur=None):
"""Add the given entitites to the database (in entity_history).
Args:
- entities: iterable of dictionaries containing the following keys:
- uuid (uuid): id of the entity
- parent (uuid): id of the parent entity
- name (str): name of the entity
- type (str): type of entity (one of 'organization',
'group_of_entities', 'hosting', 'group_of_persons',
'person', 'project')
- description (str, optional): description of the entity
- homepage (str): url of the entity's homepage
- active (bool): whether the entity is active
- generated (bool): whether the entity was generated
- lister_metadata (dict): lister-specific entity metadata
- metadata (dict): other metadata for the entity
- validity (array of datetime.datetime): timestamps at which we
listed the entity.
"""
db = self.db
cols = list(db.entity_history_cols)
cols.remove('id')
db.mktemp_entity_history()
db.copy_to(entities, 'tmp_entity_history', cols, cur)
db.entity_history_add_from_temp()
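# Illustrative sketch of a minimal entity dict (not part of the module;
# the uuids and the validity timestamp are hypothetical):
#
#   storage.entity_add([{
#       'uuid': entity_uuid, 'parent': None, 'name': 'github:user:example',
#       'type': 'person', 'description': None, 'homepage': None,
#       'active': True, 'generated': True,
#       'lister_metadata': {'lister': lister_uuid, 'id': 123},
#       'metadata': None, 'validity': [listing_time],
#   }])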
@db_transaction_generator
def entity_get_from_lister_metadata(self, entities, cur=None):
"""Fetch entities from the database, matching with the lister and
associated metadata.
Args:
entities: iterable of dictionaries containing the lister metadata
to look for. Useful keys are 'lister', 'type', 'id', ...
Yields:
The fetched entities with all their attributes. When no match is
found for an input entry, a dict with a None uuid and the input
lister_metadata is yielded instead.
"""
db = self.db
db.mktemp_entity_lister(cur)
mapped_entities = []
for i, entity in enumerate(entities):
mapped_entity = {
'id': i,
'lister_metadata': entity,
}
mapped_entities.append(mapped_entity)
db.copy_to(mapped_entities, 'tmp_entity_lister',
['id', 'lister_metadata'], cur)
cur.execute('''select id, %s
from swh_entity_from_tmp_entity_lister()
order by id''' %
','.join(db.entity_cols))
for id, *entity_vals in cur:
fetched_entity = dict(zip(db.entity_cols, entity_vals))
if fetched_entity['uuid']:
yield fetched_entity
else:
yield {
'uuid': None,
# map the row id back to the matching input metadata;
# the loop variable `i` above would be stale here
'lister_metadata': mapped_entities[id]['lister_metadata'],
}
@db_transaction_generator
def entity_get(self, uuid, cur=None):
"""Returns the list of entity per its uuid identifier and also its
parent hierarchy.
Args:
uuid: entity's identifier
Returns:
List of entities starting with entity with uuid and the parent
hierarchy from such entity.
"""
db = self.db
for entity in db.entity_get(uuid, cur):
yield dict(zip(db.entity_cols, entity))
@db_transaction
def entity_get_one(self, uuid, cur=None):
"""Returns one entity using its uuid identifier.
Args:
uuid: entity's identifier
Returns:
the object corresponding to the given entity
"""
db = self.db
entity = db.entity_get_one(uuid, cur)
if entity:
return dict(zip(db.entity_cols, entity))
else:
return None
@db_transaction
def stat_counters(self, cur=None):
"""compute statistics about the number of tuples in various tables
Returns:
a dictionary mapping textual labels (e.g., content) to integer
values (e.g., the number of tuples in table content)
"""
return {k: v for (k, v) in self.db.stat_counters()}
@db_transaction_generator
def content_mimetype_missing(self, mimetypes, cur=None):
"""List mimetypes missing from storage.
Args:
mimetypes: iterable of dict with keys:
- id (bytes): sha1 identifier
- tool_name (str): tool used to compute the results
- tool_version (str): associated tool's version
Yields:
The ids missing from storage for the given (id, tool_name,
tool_version) triplets.
"""
db = self.db
db.mktemp_content_mimetype_missing(cur)
db.copy_to(mimetypes, 'tmp_content_mimetype_missing',
['id', 'tool_name', 'tool_version'],
cur)
for obj in db.content_mimetype_missing_from_temp(cur):
yield obj[0]
@db_transaction
def content_mimetype_add(self, mimetypes, conflict_update=False, cur=None):
"""Add mimetypes not present in storage.
Args:
mimetypes: iterable of dictionary with keys:
- id (bytes): sha1 identifier
- mimetype (bytes): raw content's mimetype
- encoding (bytes): raw content's encoding
- tool_name (str): tool used to compute the results
- tool_version (str): associated tool's version
conflict_update: Flag to determine if we want to overwrite (true)
or skip duplicates (false, the default)
"""
db = self.db
db.mktemp_content_mimetype(cur)
db.copy_to(mimetypes, 'tmp_content_mimetype',
db.content_mimetype_cols,
cur)
db.content_mimetype_add_from_temp(conflict_update, cur)
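# Illustrative sketch (not part of the module; the sha1 and tool values
# are hypothetical):
#
#   storage.content_mimetype_add([{
#       'id': sha1, 'mimetype': b'text/plain', 'encoding': b'us-ascii',
#       'tool_name': 'file', 'tool_version': '5.22',
#   }], conflict_update=True)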
@db_transaction_generator
def content_mimetype_get(self, ids, cur=None):
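"""Retrieve mimetype metadata per id.
Args:
ids ([bytes]): iterable of sha1 identifiers
Yields:
One mimetype dict per id found in storage.
"""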
db = self.db
db.store_tmp_bytea(ids, cur)
for c in db.content_mimetype_get_from_temp():
yield converters.db_to_mimetype(
dict(zip(db.content_mimetype_cols, c)))
@db_transaction_generator
def content_language_missing(self, languages, cur=None):
"""List languages missing from storage.
Args:
languages: iterable of dict with keys:
- id (bytes): sha1 identifier
- tool_name (str): tool used to compute the results
- tool_version (str): associated tool's version
Yields:
The ids missing from storage.
"""
db = self.db
db.mktemp_content_language_missing(cur)
db.copy_to(languages, 'tmp_content_language_missing',
db.content_language_cols, cur)
for obj in db.content_language_missing_from_temp(cur):
yield obj[0]
@db_transaction_generator
def content_language_get(self, ids, cur=None):
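"""Retrieve language metadata per id.
Args:
ids ([bytes]): iterable of sha1 identifiers
Yields:
One language dict per id found in storage.
"""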
db = self.db
db.store_tmp_bytea(ids, cur)
for c in db.content_language_get_from_temp():
yield converters.db_to_language(
dict(zip(db.content_language_cols, c)))
@db_transaction
def content_language_add(self, languages, conflict_update=False, cur=None):
"""Add languages not present in storage.
Args:
languages: iterable of dictionary with keys:
- id: sha1
- lang: bytes
conflict_update: Flag to determine if we want to overwrite (true)
or skip duplicates (false, the default)
"""
db = self.db
db.mktemp_content_language(cur)
# empty language is mapped to 'unknown'
db.copy_to(
({
'id': l['id'],
'lang': 'unknown' if not l['lang'] else l['lang'],
'tool_name': l['tool_name'],
'tool_version': l['tool_version'],
} for l in languages),
'tmp_content_language', db.content_language_cols, cur)
db.content_language_add_from_temp(conflict_update, cur)
@db_transaction_generator
def content_ctags_missing(self, ctags, cur=None):
"""List ctags missing from storage.
Args:
ctags: iterable of dict with keys:
- id (bytes): sha1 identifier
- tool_name (str): tool name used
- tool_version (str): associated version
Yields:
The ids missing from storage.
"""
db = self.db
db.mktemp_content_ctags_missing(cur)
db.copy_to(ctags,
tblname='tmp_content_ctags_missing',
columns=['id', 'tool_name', 'tool_version'],
cur=cur)
for obj in db.content_ctags_missing_from_temp(cur):
yield obj[0]
@db_transaction_generator
def content_ctags_get(self, ids, cur=None):
"""Retrieve ctags per id.
Args:
ids ([sha1]): Iterable of sha1
"""
db = self.db
db.store_tmp_bytea(ids, cur)
for c in db.content_ctags_get_from_temp():
yield converters.db_to_ctags(dict(zip(db.content_ctags_cols, c)))
@db_transaction
def content_ctags_add(self, ctags, conflict_update=False, cur=None):
"""Add ctags not present in storage
Args:
ctags: iterable of dictionaries with keys:
- id (bytes): sha1
- ctags ([dict]): list of dictionaries with keys: name,
kind, line, language
conflict_update: Flag to determine if we want to overwrite (true)
or skip duplicates (false, the default)
"""
db = self.db
def _convert_ctags(ctags):
"""Convert ctags to list of ctags.
"""
res = []
for ctag in ctags:
res.extend(converters.ctags_to_db(ctag))
return res
db.mktemp_content_ctags(cur)
db.copy_to(_convert_ctags(ctags),
tblname='tmp_content_ctags',
columns=db.content_ctags_cols,
cur=cur)
db.content_ctags_add_from_temp(conflict_update, cur)
@db_transaction_generator
def content_ctags_search(self, expression,
limit=10, last_sha1=None, cur=None):
"""Search through content's raw ctags symbols.
Args:
expression (str): Expression to search for
limit (int): Number of rows to return (defaults to 10).
last_sha1 (str): sha1 offset from which to retrieve results
(defaults to None, i.e. from the beginning).
Yields:
ctags rows including id, name, lang, kind and line.
"""
db = self.db
for obj in db.content_ctags_search(expression, last_sha1, limit,
cur=cur):
yield converters.db_to_ctags(dict(zip(db.content_ctags_cols, obj)))
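# Illustrative sketch paginating search results (not part of the module;
# `storage` is hypothetical), using the last row's id as the offset:
#
#   page = list(storage.content_ctags_search('hello', limit=10))
#   if page:
#       more = list(storage.content_ctags_search(
#           'hello', limit=10, last_sha1=page[-1]['id']))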
@db_transaction_generator
def content_fossology_license_missing(self, licenses, cur=None):
"""List license missing from storage.
Args:
licenses ([bytes]): iterable of sha1
Returns:
an iterable of missing id
"""
db = self.db
db.mktemp_content_fossology_license_missing(cur)
db.copy_to(licenses, 'tmp_content_fossology_license_missing',
['id', 'tool_name', 'tool_version'],
cur)
for obj in db.content_fossology_license_missing_from_temp(cur):
yield obj[0]
@db_transaction_generator
def content_fossology_license_get(self, ids, cur=None):
"""Retrieve licenses per id.
Args:
ids ([sha1]): Iterable of sha1
Yields:
List of dict with the following keys:
- id (bytes)
- licenses ([str]): associated licenses for that content
"""
db = self.db
db.store_tmp_bytea(ids, cur)
for c in db.content_fossology_license_get_from_temp():
yield dict(zip(db.content_fossology_license_cols, c))
@db_transaction
def content_fossology_license_add(self, licenses,
conflict_update=False, cur=None):
"""Add licenses not present in storage.
Args:
licenses ([dict]): iterable of dict with keys:
- id: sha1
- license ([bytes]): List of licenses associated to sha1
- tool (str): nomossa
conflict_update: Flag to determine if we want to overwrite (true)
or skip duplicates (false, the default)
Returns:
List of content_license entries which failed due to
unknown licenses
"""
db = self.db
# First, we check the licenses are ok
licenses_to_check = set() # set of licenses to check
content_licenses_to_add = {} # content_licenses to add
names_to_content_license = {} # map from names to content licenses
for c in licenses:
id = c['id']
for name in c['licenses']:
licenses_to_check.add(name)
l = names_to_content_license.get(name, [])
l.append(id)
names_to_content_license[name] = l
content_licenses_to_add[id] = c
db.mktemp_content_fossology_license_unknown()
db.copy_to(({'name': name} for name in licenses_to_check),
tblname='tmp_content_fossology_license_unknown',
columns=['name'],
cur=cur)
unknown_licenses = db.content_fossology_license_unknown(cur)
# We filter out wrong content_license (this will be the result)
wrong_content_licenses = []
for name, in unknown_licenses:
for id in names_to_content_license[name]:
# we can remove it multiple times since one content
# can have multiple licenses
content_license = content_licenses_to_add.pop(id, None)
if content_license:
wrong_content_licenses.append(content_license)
if content_licenses_to_add:
# Then, we add the correct ones
db.mktemp_content_fossology_license(cur)
db.copy_to(
({
'id': c['id'],
'tool_name': c['tool_name'],
'tool_version': c['tool_version'],
'license': license,
} for c in content_licenses_to_add.values()
for license in c['licenses']),
tblname='tmp_content_fossology_license',
columns=['id', 'tool_name', 'tool_version', 'license'],
cur=cur)
db.content_fossology_license_add_from_temp(conflict_update, cur)
return wrong_content_licenses
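# Illustrative sketch (not part of the module; the sha1 and tool values
# are hypothetical): an entry is returned, not added, if any of its
# license names is unknown to storage:
#
#   rejected = storage.content_fossology_license_add([{
#       'id': sha1, 'licenses': ['GPL-2.0', 'no-such-license'],
#       'tool_name': 'nomossa', 'tool_version': '1.0',
#   }])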
diff --git a/swh/storage/tests/test_storage.py b/swh/storage/tests/test_storage.py
index 3e2d6e8..8e92c6e 100644
--- a/swh/storage/tests/test_storage.py
+++ b/swh/storage/tests/test_storage.py
@@ -1,2986 +1,3008 @@
# Copyright (C) 2015-2016 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information
import copy
import datetime
import os
import psycopg2
import shutil
import tempfile
import unittest
from uuid import UUID
from unittest.mock import patch
from nose.tools import istest
from nose.plugins.attrib import attr
from swh.core.tests.db_testing import DbTestFixture
from swh.core.hashutil import hex_to_hash
from swh.storage import get_storage
from swh.storage.db import cursor_to_bytes
TEST_DIR = os.path.dirname(os.path.abspath(__file__))
TEST_DATA_DIR = os.path.join(TEST_DIR, '../../../../swh-storage-testdata')
@attr('db')
class AbstractTestStorage(DbTestFixture):
"""Base class for Storage testing.
This class is used as-is to test local storage (see TestStorage
below) and remote storage (see TestRemoteStorage in
test_remote_storage.py).
We need to have the two classes inherit from this base class
separately to avoid nosetests running the tests from the base
class twice.
"""
TEST_DB_DUMP = os.path.join(TEST_DATA_DIR, 'dumps/swh.dump')
def setUp(self):
super().setUp()
self.maxDiff = None
self.objroot = tempfile.mkdtemp()
storage_conf = {
'cls': 'local',
'args': {
'db': self.conn,
'objstorage': {
'cls': 'pathslicing',
'args': {
'root': self.objroot,
'slicing': '0:2/2:4/4:6',
},
},
},
}
self.storage = get_storage(**storage_conf)
self.cont = {
'data': b'42\n',
'length': 3,
'sha1': hex_to_hash(
'34973274ccef6ab4dfaaf86599792fa9c3fe4689'),
'sha1_git': hex_to_hash(
'd81cc0710eb6cf9efd5b920a8453e1e07157b6cd'),
'sha256': hex_to_hash(
'673650f936cb3b0a2f93ce09d81be107'
'48b1b203c19e8176b4eefc1964a0cf3a'),
'status': 'visible',
}
self.cont2 = {
'data': b'4242\n',
'length': 5,
'sha1': hex_to_hash(
'61c2b3a30496d329e21af70dd2d7e097046d07b7'),
'sha1_git': hex_to_hash(
'36fade77193cb6d2bd826161a0979d64c28ab4fa'),
'sha256': hex_to_hash(
'859f0b154fdb2d630f45e1ecae4a8629'
'15435e663248bb8461d914696fc047cd'),
'status': 'visible',
}
self.cont3 = {
'data': b'424242\n',
'length': 7,
'sha1': hex_to_hash(
'3e21cc4942a4234c9e5edd8a9cacd1670fe59f13'),
'sha1_git': hex_to_hash(
'c932c7649c6dfa4b82327d121215116909eb3bea'),
'sha256': hex_to_hash(
'92fb72daf8c6818288a35137b72155f5'
'07e5de8d892712ab96277aaed8cf8a36'),
'status': 'visible',
}
self.missing_cont = {
'data': b'missing\n',
'length': 8,
'sha1': hex_to_hash(
'f9c24e2abb82063a3ba2c44efd2d3c797f28ac90'),
'sha1_git': hex_to_hash(
'33e45d56f88993aae6a0198013efa80716fd8919'),
'sha256': hex_to_hash(
'6bbd052ab054ef222c1c87be60cd191a'
'ddedd24cc882d1f5f7f7be61dc61bb3a'),
'status': 'absent',
}
self.skipped_cont = {
'length': 1024 * 1024 * 200,
'sha1_git': hex_to_hash(
'33e45d56f88993aae6a0198013efa80716fd8920'),
'reason': 'Content too long',
'status': 'absent',
}
self.skipped_cont2 = {
'length': 1024 * 1024 * 300,
'sha1_git': hex_to_hash(
'33e45d56f88993aae6a0198013efa80716fd8921'),
'reason': 'Content too long',
'status': 'absent',
}
self.dir = {
'id': b'4\x013\x422\x531\x000\xf51\xe62\xa73\xff7\xc3\xa90',
'entries': [
{
'name': b'foo',
'type': 'file',
'target': self.cont['sha1_git'],
'perms': 0o644,
},
{
'name': b'bar\xc3',
'type': 'dir',
'target': b'12345678901234567890',
'perms': 0o2000,
},
],
}
self.dir2 = {
'id': b'4\x013\x422\x531\x000\xf51\xe62\xa73\xff7\xc3\xa95',
'entries': [
{
'name': b'oof',
'type': 'file',
'target': self.cont2['sha1_git'],
'perms': 0o644,
}
],
}
self.dir3 = {
'id': hex_to_hash('33e45d56f88993aae6a0198013efa80716fd8921'),
'entries': [
{
'name': b'foo',
'type': 'file',
'target': self.cont['sha1_git'],
'perms': 0o644,
},
{
'name': b'bar',
'type': 'dir',
'target': b'12345678901234560000',
'perms': 0o2000,
},
{
'name': b'hello',
'type': 'file',
'target': b'12345678901234567890',
'perms': 0o644,
},
],
}
self.minus_offset = datetime.timezone(datetime.timedelta(minutes=-120))
self.plus_offset = datetime.timezone(datetime.timedelta(minutes=120))
self.revision = {
'id': b'56789012345678901234',
'message': b'hello',
'author': {
'name': b'Nicolas Dandrimont',
'email': b'nicolas@example.com',
'fullname': b'Nicolas Dandrimont <nicolas@example.com> ',
},
'date': {
'timestamp': 1234567890,
'offset': 120,
'negative_utc': None,
},
'committer': {
'name': b'St\xc3fano Zacchiroli',
'email': b'stefano@example.com',
'fullname': b'St\xc3fano Zacchiroli <stefano@example.com>'
},
'committer_date': {
'timestamp': 1123456789,
'offset': 0,
'negative_utc': True,
},
'parents': [b'01234567890123456789', b'23434512345123456789'],
'type': 'git',
'directory': self.dir['id'],
'metadata': {
'checksums': {
'sha1': 'tarball-sha1',
'sha256': 'tarball-sha256',
},
'signed-off-by': 'some-dude',
'extra_headers': [
['gpgsig', b'test123'],
['mergetags', [b'foo\\bar', b'\x22\xaf\x89\x80\x01\x00']],
],
},
'synthetic': True
}
self.revision2 = {
'id': b'87659012345678904321',
'message': b'hello again',
'author': {
'name': b'Roberto Dicosmo',
'email': b'roberto@example.com',
'fullname': b'Roberto Dicosmo <roberto@example.com>',
},
'date': {
'timestamp': 1234567843.22,
'offset': -720,
'negative_utc': None,
},
'committer': {
'name': b'tony',
'email': b'ar@dumont.fr',
'fullname': b'tony <ar@dumont.fr>',
},
'committer_date': {
'timestamp': 1123456789,
'offset': 0,
'negative_utc': False,
},
'parents': [b'01234567890123456789'],
'type': 'git',
'directory': self.dir2['id'],
'metadata': None,
'synthetic': False
}
self.revision3 = {
'id': hex_to_hash('7026b7c1a2af56521e951c01ed20f255fa054238'),
'message': b'a simple revision with no parents this time',
'author': {
'name': b'Roberto Dicosmo',
'email': b'roberto@example.com',
'fullname': b'Roberto Dicosmo <roberto@example.com>',
},
'date': {
'timestamp': 1234567843.22,
'offset': -720,
'negative_utc': None,
},
'committer': {
'name': b'tony',
'email': b'ar@dumont.fr',
'fullname': b'tony <ar@dumont.fr>',
},
'committer_date': {
'timestamp': 1127351742,
'offset': 0,
'negative_utc': False,
},
'parents': [],
'type': 'git',
'directory': self.dir2['id'],
'metadata': None,
'synthetic': True
}
self.revision4 = {
'id': hex_to_hash('368a48fe15b7db2383775f97c6b247011b3f14f4'),
'message': b'parent of self.revision2',
'author': {
'name': b'me',
'email': b'me@soft.heri',
'fullname': b'me <me@soft.heri>',
},
'date': {
'timestamp': 1244567843.22,
'offset': -720,
'negative_utc': None,
},
'committer': {
'name': b'committer-dude',
'email': b'committer@dude.com',
'fullname': b'committer-dude <committer@dude.com>',
},
'committer_date': {
'timestamp': 1244567843.22,
'offset': -720,
'negative_utc': None,
},
'parents': [self.revision3['id']],
'type': 'git',
'directory': self.dir['id'],
'metadata': None,
'synthetic': False
}
self.origin = {
'url': 'file:///dev/null',
'type': 'git',
}
self.origin2 = {
'url': 'file:///dev/zero',
'type': 'git',
}
self.date_visit1 = datetime.datetime(2015, 1, 1, 23, 0, 0,
tzinfo=datetime.timezone.utc)
self.occurrence = {
'branch': b'master',
'target': b'67890123456789012345',
'target_type': 'revision',
}
self.date_visit2 = datetime.datetime(2015, 1, 1, 23, 0, 0,
tzinfo=datetime.timezone.utc)
self.occurrence2 = {
'branch': b'master',
'target': self.revision2['id'],
'target_type': 'revision',
}
self.date_visit3 = datetime.datetime(2015, 1, 1, 23, 0, 0,
tzinfo=datetime.timezone.utc)
# template occurrence to be filled in test (cf. revision_log_by)
self.occurrence3 = {
'branch': b'master',
'target_type': 'revision',
}
self.release = {
'id': b'87659012345678901234',
'name': b'v0.0.1',
'author': {
'name': b'olasd',
'email': b'nic@olasd.fr',
'fullname': b'olasd <nic@olasd.fr>',
},
'date': {
'timestamp': 1234567890,
'offset': 42,
'negative_utc': None,
},
'target': b'43210987654321098765',
'target_type': 'revision',
'message': b'synthetic release',
'synthetic': True,
}
self.release2 = {
'id': b'56789012348765901234',
'name': b'v0.0.2',
'author': {
'name': b'tony',
'email': b'ar@dumont.fr',
'fullname': b'tony <ar@dumont.fr>',
},
'date': {
'timestamp': 1634366813,
'offset': -120,
'negative_utc': None,
},
'target': b'432109\xa9765432\xc309\x00765',
'target_type': 'revision',
'message': b'v0.0.2\nMisc performance improvments + bug fixes',
'synthetic': False
}
self.release3 = {
'id': b'87659012345678904321',
'name': b'v0.0.2',
'author': {
'name': b'tony',
'email': b'tony@ardumont.fr',
'fullname': b'tony <tony@ardumont.fr>',
},
'date': {
'timestamp': 1634336813,
'offset': 0,
'negative_utc': False,
},
'target': self.revision2['id'],
'target_type': 'revision',
'message': b'yet another synthetic release',
'synthetic': True,
}
self.fetch_history_date = datetime.datetime(
2015, 1, 2, 21, 0, 0,
tzinfo=datetime.timezone.utc)
self.fetch_history_end = datetime.datetime(
2015, 1, 2, 23, 0, 0,
tzinfo=datetime.timezone.utc)
self.fetch_history_duration = (self.fetch_history_end -
self.fetch_history_date)
self.fetch_history_data = {
'status': True,
'result': {'foo': 'bar'},
'stdout': 'blabla',
'stderr': 'blablabla',
}
self.entity1 = {
'uuid': UUID('f96a7ec1-0058-4920-90cc-7327e4b5a4bf'),
# GitHub users
'parent': UUID('ad6df473-c1d2-4f40-bc58-2b091d4a750e'),
'name': 'github:user:olasd',
'type': 'person',
'description': 'Nicolas Dandrimont',
'homepage': 'http://example.com',
'active': True,
'generated': True,
'lister_metadata': {
# swh.lister.github
'lister': '34bd6b1b-463f-43e5-a697-785107f598e4',
'id': 12877,
'type': 'user',
'last_activity': '2015-11-03',
},
'metadata': None,
'validity': [
datetime.datetime(2015, 11, 3, 11, 0, 0,
tzinfo=datetime.timezone.utc),
]
}
self.entity1_query = {
'lister': '34bd6b1b-463f-43e5-a697-785107f598e4',
'id': 12877,
'type': 'user',
}
self.entity2 = {
'uuid': UUID('3903d075-32d6-46d4-9e29-0aef3612c4eb'),
# GitHub users
'parent': UUID('ad6df473-c1d2-4f40-bc58-2b091d4a750e'),
'name': 'github:user:zacchiro',
'type': 'person',
'description': 'Stefano Zacchiroli',
'homepage': 'http://example.com',
'active': True,
'generated': True,
'lister_metadata': {
# swh.lister.github
'lister': '34bd6b1b-463f-43e5-a697-785107f598e4',
'id': 216766,
'type': 'user',
'last_activity': '2015-11-03',
},
'metadata': None,
'validity': [
datetime.datetime(2015, 11, 3, 11, 0, 0,
tzinfo=datetime.timezone.utc),
]
}
self.entity3 = {
'uuid': UUID('111df473-c1d2-4f40-bc58-2b091d4a7111'),
# GitHub users
'parent': UUID('222df473-c1d2-4f40-bc58-2b091d4a7222'),
'name': 'github:user:ardumont',
'type': 'person',
'description': 'Antoine R. Dumont a.k.a tony',
'homepage': 'https://ardumont.github.io',
'active': True,
'generated': True,
'lister_metadata': {
'lister': '34bd6b1b-463f-43e5-a697-785107f598e4',
'id': 666,
'type': 'user',
'last_activity': '2016-01-15',
},
'metadata': None,
'validity': [
datetime.datetime(2015, 11, 3, 11, 0, 0,
tzinfo=datetime.timezone.utc),
]
}
self.entity4 = {
'uuid': UUID('222df473-c1d2-4f40-bc58-2b091d4a7222'),
# GitHub users
'parent': None,
'name': 'github:user:ToNyX',
'type': 'person',
'description': 'ToNyX',
'homepage': 'https://ToNyX.github.io',
'active': True,
'generated': True,
'lister_metadata': {
'lister': '34bd6b1b-463f-43e5-a697-785107f598e4',
'id': 999,
'type': 'user',
'last_activity': '2015-12-24',
},
'metadata': None,
'validity': [
datetime.datetime(2015, 11, 3, 11, 0, 0,
tzinfo=datetime.timezone.utc),
]
}
self.entity2_query = {
'lister_metadata': {
'lister': '34bd6b1b-463f-43e5-a697-785107f598e4',
'id': 216766,
'type': 'user',
},
}
def tearDown(self):
shutil.rmtree(self.objroot)
self.cursor.execute("""SELECT table_name FROM information_schema.tables
WHERE table_schema = %s""", ('public',))
tables = set(table for (table,) in self.cursor.fetchall())
tables -= {'dbversion', 'entity', 'entity_history', 'listable_entity',
'fossology_license', 'indexer_configuration'}
for table in tables:
self.cursor.execute('truncate table %s cascade' % table)
self.cursor.execute('delete from entity where generated=true')
self.cursor.execute('delete from entity_history where generated=true')
self.conn.commit()
super().tearDown()
@istest
def check_config(self):
self.assertTrue(self.storage.check_config(check_write=True))
self.assertTrue(self.storage.check_config(check_write=False))
@istest
def content_add(self):
cont = self.cont
self.storage.content_add([cont])
if hasattr(self.storage, 'objstorage'):
self.assertIn(cont['sha1'], self.storage.objstorage)
self.cursor.execute('SELECT sha1, sha1_git, sha256, length, status'
' FROM content WHERE sha1 = %s',
(cont['sha1'],))
datum = self.cursor.fetchone()
self.assertEqual(
(datum[0].tobytes(), datum[1].tobytes(), datum[2].tobytes(),
datum[3], datum[4]),
(cont['sha1'], cont['sha1_git'], cont['sha256'],
cont['length'], 'visible'))
@istest
def content_add_collision(self):
cont1 = self.cont
# create (corrupted) content with same sha1{,_git} but != sha256
cont1b = cont1.copy()
sha256_array = bytearray(cont1b['sha256'])
sha256_array[0] += 1
cont1b['sha256'] = bytes(sha256_array)
with self.assertRaises(psycopg2.IntegrityError):
self.storage.content_add([cont1, cont1b])
@istest
def skipped_content_add(self):
cont = self.skipped_cont
cont2 = self.skipped_cont2
self.storage.content_add([cont])
self.storage.content_add([cont2])
self.cursor.execute('SELECT sha1, sha1_git, sha256, length, status,'
'reason FROM skipped_content ORDER BY sha1_git')
datum = self.cursor.fetchone()
self.assertEqual(
(datum[0], datum[1].tobytes(), datum[2],
datum[3], datum[4], datum[5]),
(None, cont['sha1_git'], None,
cont['length'], 'absent', 'Content too long'))
datum2 = self.cursor.fetchone()
self.assertEqual(
(datum2[0], datum2[1].tobytes(), datum2[2],
datum2[3], datum2[4], datum2[5]),
(None, cont2['sha1_git'], None,
cont2['length'], 'absent', 'Content too long'))
@istest
def content_missing(self):
cont2 = self.cont2
missing_cont = self.missing_cont
self.storage.content_add([cont2])
gen = self.storage.content_missing([cont2, missing_cont])
self.assertEqual(list(gen), [missing_cont['sha1']])
@istest
def content_missing_per_sha1(self):
# given
cont2 = self.cont2
missing_cont = self.missing_cont
self.storage.content_add([cont2])
# when
gen = self.storage.content_missing_per_sha1([cont2['sha1'],
missing_cont['sha1']])
# then
self.assertEqual(list(gen), [missing_cont['sha1']])
@istest
def content_get_metadata(self):
cont1 = self.cont.copy()
cont2 = self.cont2.copy()
self.storage.content_add([cont1, cont2])
gen = self.storage.content_get_metadata([cont1['sha1'], cont2['sha1']])
# we only retrieve the metadata
cont1.pop('data')
cont2.pop('data')
self.assertEqual(list(gen), [cont1, cont2])
@istest
def content_get_metadata_missing_sha1(self):
cont1 = self.cont.copy()
cont2 = self.cont2.copy()
missing_cont = self.missing_cont.copy()
self.storage.content_add([cont1, cont2])
gen = self.storage.content_get_metadata([missing_cont['sha1']])
# All the metadata keys are None
missing_cont.pop('data')
for key in list(missing_cont):
if key != 'sha1':
missing_cont[key] = None
self.assertEqual(list(gen), [missing_cont])
@istest
def directory_get(self):
# given
init_missing = list(self.storage.directory_missing([self.dir['id']]))
self.assertEqual([self.dir['id']], init_missing)
self.storage.directory_add([self.dir])
# when
actual_dirs = list(self.storage.directory_get([self.dir['id']]))
self.assertEqual(len(actual_dirs), 1)
dir0 = actual_dirs[0]
self.assertEqual(dir0['id'], self.dir['id'])
# ids are generated, so their values are non-deterministic
self.assertEqual(len(dir0['file_entries']), 1)
self.assertEqual(len(dir0['dir_entries']), 1)
self.assertIsNone(dir0['rev_entries'])
after_missing = list(self.storage.directory_missing([self.dir['id']]))
self.assertEqual([], after_missing)
@istest
def directory_add(self):
init_missing = list(self.storage.directory_missing([self.dir['id']]))
self.assertEqual([self.dir['id']], init_missing)
self.storage.directory_add([self.dir])
stored_data = list(self.storage.directory_ls(self.dir['id']))
data_to_store = [{
'dir_id': self.dir['id'],
'type': ent['type'],
'target': ent['target'],
'name': ent['name'],
'perms': ent['perms'],
'status': None,
'sha1': None,
'sha1_git': None,
'sha256': None,
}
for ent in sorted(self.dir['entries'], key=lambda ent: ent['name'])
]
self.assertEqual(data_to_store, stored_data)
after_missing = list(self.storage.directory_missing([self.dir['id']]))
self.assertEqual([], after_missing)
@istest
def directory_entry_get_by_path(self):
# given
init_missing = list(self.storage.directory_missing([self.dir3['id']]))
self.assertEqual([self.dir3['id']], init_missing)
self.storage.directory_add([self.dir3])
expected_entries = [
{
'dir_id': self.dir3['id'],
'name': b'foo',
'type': 'file',
'target': self.cont['sha1_git'],
'sha1': None,
'sha1_git': None,
'sha256': None,
'status': None,
'perms': 0o644,
},
{
'dir_id': self.dir3['id'],
'name': b'bar',
'type': 'dir',
'target': b'12345678901234560000',
'sha1': None,
'sha1_git': None,
'sha256': None,
'status': None,
'perms': 0o2000,
},
{
'dir_id': self.dir3['id'],
'name': b'hello',
'type': 'file',
'target': b'12345678901234567890',
'sha1': None,
'sha1_git': None,
'sha256': None,
'status': None,
'perms': 0o644,
},
]
# when (all must be found here)
for entry, expected_entry in zip(self.dir3['entries'],
expected_entries):
actual_entry = self.storage.directory_entry_get_by_path(
self.dir3['id'],
[entry['name']])
self.assertEqual(actual_entry, expected_entry)
# when (nothing should be found here since self.dir is not persisted.)
for entry in self.dir['entries']:
actual_entry = self.storage.directory_entry_get_by_path(
self.dir['id'],
[entry['name']])
self.assertIsNone(actual_entry)
@istest
def revision_add(self):
init_missing = self.storage.revision_missing([self.revision['id']])
self.assertEqual([self.revision['id']], list(init_missing))
self.storage.revision_add([self.revision])
end_missing = self.storage.revision_missing([self.revision['id']])
self.assertEqual([], list(end_missing))
def cache_content_revision_objects(self):
self.storage.content_add([self.cont, self.cont2, self.cont3])
directory = {
'id': b'4\x013\x422\x531\x000\xf51\xe62\xa73\xff7\xc3\xa90',
'entries': [
{
'name': b'bar',
'type': 'file',
'target': self.cont2['sha1_git'],
'perms': 0o644,
},
{
'name': b'foo',
'type': 'file',
'target': self.cont['sha1_git'],
'perms': 0o644,
},
{
'name': b'bar\xc3',
'type': 'dir',
'target': b'12345678901234567890',
'perms': 0o2000,
},
],
}
directory2 = copy.deepcopy(directory)
directory2['id'] = (directory2['id'][:-1] +
bytes([(directory2['id'][-1] + 1) % 256]))
directory2['entries'][1] = {
'name': b'foo',
'type': 'file',
'target': self.cont3['sha1_git'],
'perms': 0o644,
}
self.storage.directory_add([directory, directory2])
revision = self.revision.copy()
revision['directory'] = directory['id']
revision2 = copy.deepcopy(revision)
revision2['parents'] = [revision['id']]
revision2['directory'] = directory2['id']
revision2['id'] = (revision2['id'][:-1] +
bytes([(revision2['id'][-1] + 1) % 256]))
self.storage.revision_add([revision, revision2])
return (directory, directory2, revision, revision2)
@istest
def cache_content_revision_add(self):
# Create a real directory tree (contents + directory) and a
# revision targeting that directory.
# Assert the cache is empty for that revision
# Then create that revision
# Trigger the cache population for that revision
# Assert the cache now contains information for that revision
# Trigger again the cache population for that revision
# Assert the cache is not modified
# given ()
(directory, directory2,
revision, revision2) = self.cache_content_revision_objects()
# assert nothing in cache yet
count_query = '''select count(*)
from cache_content_revision'''
self.cursor.execute(count_query)
ret = self.cursor.fetchone()
self.assertEqual(ret, (0, ))
# when, triggered the first time, we cache the revision
self.storage.cache_content_revision_add([revision['id']])
# the second time, we do nothing as this is already done
self.storage.cache_content_revision_add([revision['id']])
# then
self.cursor.execute(count_query)
ret = self.cursor.fetchone()
# only 2 contents exist for that revision (the second call to
# cache_content_revision_add is discarded as the revision is cached)
self.assertEqual(ret, (2, ))
self.cursor.execute('select * from cache_content_revision')
ret = self.cursor.fetchall()
expected_cache_entries = [
(directory['entries'][0]['target'], False,
[[revision['id'], directory['entries'][0]['name']]]),
(directory['entries'][1]['target'], False,
[[revision['id'], directory['entries'][1]['name']]])
]
for i, expected_entry in enumerate(expected_cache_entries):
ret_entry = (ret[i][0].tobytes(), ret[i][1],
[[ret[i][2][0][0].tobytes(),
ret[i][2][0][1].tobytes()]])
self.assertEquals(ret_entry, expected_entry)
@istest
def cache_content_revision_add_twice(self):
# given ()
(directory, directory2,
revision, revision2) = self.cache_content_revision_objects()
# assert nothing in cache yet
count_query = '''select count(*)
from cache_content_revision'''
self.cursor.execute(count_query)
ret = self.cursor.fetchone()
self.assertEqual(ret, (0, ))
# when, triggered the first time, we cache the revision
self.storage.cache_content_revision_add([revision['id']])
# the second time, we do nothing as this is already done
self.storage.cache_content_revision_add([revision2['id']])
# then
self.cursor.execute('select * from cache_content_revision')
cache_entries = {
content.tobytes(): [[rev.tobytes(), path.tobytes()]
for rev, path in rev_paths]
for content, blacklisted, rev_paths in self.cursor.fetchall()
}
self.assertEquals(len(cache_entries), 3)
self.assertEquals(len(cache_entries[self.cont['sha1_git']]), 1)
self.assertEquals(len(cache_entries[self.cont2['sha1_git']]), 2)
self.assertEquals(len(cache_entries[self.cont3['sha1_git']]), 1)
@istest
def cache_content_get_all(self):
# given
(directory, directory2,
revision, revision2) = self.cache_content_revision_objects()
# assert nothing in cache yet
test_query = '''select sha1, sha1_git, sha256, ccr.revision_paths
from cache_content_revision ccr
inner join content c on c.sha1_git=ccr.content'''
self.storage.cache_content_revision_add([revision['id']])
self.cursor.execute(test_query, (revision['id'],))
ret = list(cursor_to_bytes(self.cursor))
self.assertEqual(len(ret), 2)
expected_contents = []
for entry in ret:
expected_contents.append(dict(
zip(['sha1', 'sha1_git', 'sha256', 'revision_paths'], entry)))
# 1. default filters give everything
actual_cache_contents = list(self.storage.cache_content_get_all())
self.assertEquals(actual_cache_contents, expected_contents)
@istest
def cache_content_get(self):
# given
(directory, directory2,
revision, revision2) = self.cache_content_revision_objects()
# assert nothing in cache yet
test_query = '''select c.sha1, c.sha1_git, c.sha256, ccr.revision_paths
from cache_content_revision ccr
inner join content c on c.sha1_git=ccr.content
where ccr.content=%s'''
self.storage.cache_content_revision_add([revision['id']])
self.cursor.execute(test_query, (self.cont2['sha1_git'],))
ret = list(cursor_to_bytes(self.cursor))[0]
self.assertIsNotNone(ret)
expected_content = dict(
zip(['sha1', 'sha1_git', 'sha256', 'revision_paths'], ret))
# when
actual_cache_content = self.storage.cache_content_get(self.cont2)
# then
self.assertEquals(actual_cache_content, expected_content)
@istest
def revision_log(self):
# given
# self.revision4 -is-child-of-> self.revision3
self.storage.revision_add([self.revision3,
self.revision4])
# when
actual_results = list(self.storage.revision_log(
[self.revision4['id']]))
# hack: ids generated
for actual_result in actual_results:
del actual_result['author']['id']
del actual_result['committer']['id']
self.assertEqual(len(actual_results), 2) # rev4 -child-> rev3
self.assertEquals(actual_results[0], self.revision4)
self.assertEquals(actual_results[1], self.revision3)
@istest
def revision_log_with_limit(self):
# given
# self.revision4 -is-child-of-> self.revision3
self.storage.revision_add([self.revision3,
self.revision4])
actual_results = list(self.storage.revision_log(
[self.revision4['id']], 1))
# hack: ids generated
for actual_result in actual_results:
del actual_result['author']['id']
del actual_result['committer']['id']
self.assertEqual(len(actual_results), 1)
self.assertEquals(actual_results[0], self.revision4)
@istest
def revision_log_by(self):
# given
origin_id = self.storage.origin_add_one(self.origin2)
self.storage.revision_add([self.revision3,
self.revision4])
# occurrence3 targets 'revision4'
# with branch 'master' and origin origin_id
occurrence3 = self.occurrence3.copy()
date_visit1 = self.date_visit3
origin_visit1 = self.storage.origin_visit_add(origin_id,
date_visit1)
occurrence3.update({
'origin': origin_id,
'target': self.revision4['id'],
'visit': origin_visit1['visit'],
})
self.storage.occurrence_add([occurrence3])
# self.revision4 -is-child-of-> self.revision3
# when
actual_results = list(self.storage.revision_log_by(
origin_id,
branch_name=occurrence3['branch'],
timestamp=date_visit1))
# hack: ids generated
for actual_result in actual_results:
del actual_result['author']['id']
del actual_result['committer']['id']
self.assertEqual(len(actual_results), 2)
self.assertEquals(actual_results[0], self.revision4)
self.assertEquals(actual_results[1], self.revision3)
# when - 2
actual_results = list(self.storage.revision_log_by(
origin_id,
branch_name=None,
timestamp=None,
limit=1))
# then
for actual_result in actual_results:
del actual_result['author']['id']
del actual_result['committer']['id']
self.assertEqual(len(actual_results), 1)
self.assertEquals(actual_results[0], self.revision4)
# when - 3 (revision not found)
actual_res = list(self.storage.revision_log_by(
origin_id,
branch_name='inexistant-branch',
timestamp=None))
self.assertEquals(actual_res, [])
@staticmethod
def _short_revision(revision):
return [revision['id'], revision['parents']]
@istest
def revision_shortlog(self):
# given
# self.revision4 -is-child-of-> self.revision3
self.storage.revision_add([self.revision3,
self.revision4])
# when
actual_results = list(self.storage.revision_shortlog(
[self.revision4['id']]))
self.assertEqual(len(actual_results), 2) # rev4 -child-> rev3
self.assertEquals(list(actual_results[0]),
self._short_revision(self.revision4))
self.assertEquals(list(actual_results[1]),
self._short_revision(self.revision3))
@istest
def revision_shortlog_with_limit(self):
# given
# self.revision4 -is-child-of-> self.revision3
self.storage.revision_add([self.revision3,
self.revision4])
actual_results = list(self.storage.revision_shortlog(
[self.revision4['id']], 1))
self.assertEqual(len(actual_results), 1)
self.assertEquals(list(actual_results[0]),
self._short_revision(self.revision4))
@istest
def revision_get(self):
self.storage.revision_add([self.revision])
actual_revisions = list(self.storage.revision_get(
[self.revision['id'], self.revision2['id']]))
# when
del actual_revisions[0]['author']['id'] # hack: ids are generated
del actual_revisions[0]['committer']['id']
self.assertEqual(len(actual_revisions), 2)
self.assertEqual(actual_revisions[0], self.revision)
self.assertIsNone(actual_revisions[1])
@istest
def revision_get_no_parents(self):
self.storage.revision_add([self.revision3])
get = list(self.storage.revision_get([self.revision3['id']]))
self.assertEqual(len(get), 1)
self.assertEqual(get[0]['parents'], []) # no parents on this one
@istest
def revision_get_by(self):
# given
self.storage.content_add([self.cont2])
self.storage.directory_add([self.dir2]) # point to self.cont
self.storage.revision_add([self.revision2]) # points to self.dir
origin_id = self.storage.origin_add_one(self.origin2)
# occurrence2 points to 'revision2' with branch 'master', we
# need to point to the right origin
occurrence2 = self.occurrence2.copy()
date_visit1 = self.date_visit2
origin_visit1 = self.storage.origin_visit_add(origin_id, date_visit1)
occurrence2.update({
'origin': origin_id,
'visit': origin_visit1['visit'],
})
self.storage.occurrence_add([occurrence2])
# we want only revision 2
expected_revisions = list(self.storage.revision_get(
[self.revision2['id']]))
# when
actual_results = list(self.storage.revision_get_by(
origin_id,
occurrence2['branch'],
None))
self.assertEqual(actual_results[0], expected_revisions[0])
# when (with no branch filtering, it's still ok)
actual_results = list(self.storage.revision_get_by(
origin_id,
None,
None))
self.assertEqual(actual_results[0], expected_revisions[0])
@istest
def revision_get_by_multiple_occurrence(self):
# 2 occurrences pointing to 2 different revisions
# each occurrence's visit is one day apart (see dt below)
# the api must return the revision whose occurrence is the nearest.
# given
self.storage.content_add([self.cont2])
self.storage.directory_add([self.dir2])
self.storage.revision_add([self.revision2, self.revision3])
origin_id = self.storage.origin_add_one(self.origin2)
# occurrence2 points to 'revision2' with branch 'master', we
# need to point to the right origin
date_visit1 = self.date_visit2
origin_visit1 = self.storage.origin_visit_add(origin_id, date_visit1)
occurrence2 = self.occurrence2.copy()
occurrence2.update({
'origin': origin_id,
'visit': origin_visit1['visit']
})
dt = datetime.timedelta(days=1)
date_visit2 = date_visit1 + dt
origin_visit2 = self.storage.origin_visit_add(origin_id, date_visit2)
occurrence3 = self.occurrence2.copy()
occurrence3.update({
'origin': origin_id,
'visit': origin_visit2['visit'],
'target': self.revision3['id'],
})
# 2 occurrences on the same branch, with visits one day apart
self.storage.occurrence_add([occurrence2])
self.storage.occurrence_add([occurrence3])
# when
actual_results0 = list(self.storage.revision_get_by(
origin_id,
occurrence2['branch'],
date_visit1))
# hack: ids are generated
del actual_results0[0]['author']['id']
del actual_results0[0]['committer']['id']
self.assertEquals(len(actual_results0), 1)
self.assertEqual(actual_results0, [self.revision2])
# when
actual_results1 = list(self.storage.revision_get_by(
origin_id,
occurrence2['branch'],
date_visit1 + dt/3)) # closer to first visit
# hack: ids are generated
del actual_results1[0]['author']['id']
del actual_results1[0]['committer']['id']
self.assertEquals(len(actual_results1), 1)
self.assertEqual(actual_results1, [self.revision2])
# when
actual_results2 = list(self.storage.revision_get_by(
origin_id,
occurrence2['branch'],
date_visit1 + 2*dt/3)) # closer to second visit
del actual_results2[0]['author']['id']
del actual_results2[0]['committer']['id']
self.assertEquals(len(actual_results2), 1)
self.assertEqual(actual_results2, [self.revision3])
# when
actual_results3 = list(self.storage.revision_get_by(
origin_id,
occurrence3['branch'],
date_visit2))
# hack: ids are generated
del actual_results3[0]['author']['id']
del actual_results3[0]['committer']['id']
self.assertEquals(len(actual_results3), 1)
self.assertEqual(actual_results3, [self.revision3])
# when
actual_results4 = list(self.storage.revision_get_by(
origin_id,
None,
None))
for actual_result in actual_results4:
del actual_result['author']['id']
del actual_result['committer']['id']
self.assertEquals(len(actual_results4), 2)
self.assertCountEqual(actual_results4,
[self.revision3, self.revision2])
@istest
def release_add(self):
init_missing = self.storage.release_missing([self.release['id'],
self.release2['id']])
self.assertEqual([self.release['id'], self.release2['id']],
list(init_missing))
self.storage.release_add([self.release, self.release2])
end_missing = self.storage.release_missing([self.release['id'],
self.release2['id']])
self.assertEqual([], list(end_missing))
@istest
def release_get(self):
# given
self.storage.release_add([self.release, self.release2])
# when
actual_releases = list(self.storage.release_get([self.release['id'],
self.release2['id']]))
# then
for actual_release in actual_releases:
del actual_release['author']['id'] # hack: ids are generated
self.assertEquals([self.release, self.release2],
[actual_releases[0], actual_releases[1]])
@istest
def release_get_by(self):
# given
self.storage.revision_add([self.revision2]) # points to self.dir
self.storage.release_add([self.release3])
origin_id = self.storage.origin_add_one(self.origin2)
# occurrence2 points to 'revision2' with branch 'master', we
# need to point to the right origin
origin_visit = self.storage.origin_visit_add(origin_id,
self.date_visit2)
occurrence2 = self.occurrence2.copy()
occurrence2.update({
'origin': origin_id,
'visit': origin_visit['visit'],
})
self.storage.occurrence_add([occurrence2])
# we want only revision 2
expected_releases = list(self.storage.release_get(
[self.release3['id']]))
# when
actual_results = list(self.storage.release_get_by(
occurrence2['origin']))
# then
self.assertEqual(actual_results[0], expected_releases[0])
@istest
def origin_add_one(self):
origin0 = self.storage.origin_get(self.origin)
self.assertIsNone(origin0)
id = self.storage.origin_add_one(self.origin)
actual_origin = self.storage.origin_get({'url': self.origin['url'],
'type': self.origin['type']})
self.assertEqual(actual_origin['id'], id)
id2 = self.storage.origin_add_one(self.origin)
self.assertEqual(id, id2)
@istest
def origin_add(self):
origin0 = self.storage.origin_get(self.origin)
self.assertIsNone(origin0)
id1, id2 = self.storage.origin_add([self.origin, self.origin2])
actual_origin = self.storage.origin_get({
'url': self.origin['url'],
'type': self.origin['type'],
})
self.assertEqual(actual_origin['id'], id1)
actual_origin2 = self.storage.origin_get({
'url': self.origin2['url'],
'type': self.origin2['type'],
})
self.assertEqual(actual_origin2['id'], id2)
@istest
def origin_add_twice(self):
add1 = self.storage.origin_add([self.origin, self.origin2])
add2 = self.storage.origin_add([self.origin, self.origin2])
self.assertEqual(add1, add2)
@istest
def origin_get(self):
self.assertIsNone(self.storage.origin_get(self.origin))
id = self.storage.origin_add_one(self.origin)
# lookup per type and url (returns id)
actual_origin0 = self.storage.origin_get({'url': self.origin['url'],
'type': self.origin['type']})
self.assertEqual(actual_origin0['id'], id)
# lookup per id (returns dict)
actual_origin1 = self.storage.origin_get({'id': id})
self.assertEqual(actual_origin1, {'id': id,
'type': self.origin['type'],
'url': self.origin['url'],
'lister': None,
'project': None})
@istest
def origin_visit_add(self):
# given
self.assertIsNone(self.storage.origin_get(self.origin2))
origin_id = self.storage.origin_add_one(self.origin2)
self.assertIsNotNone(origin_id)
# when
origin_visit1 = self.storage.origin_visit_add(
origin_id,
ts=self.date_visit2)
# then
self.assertEquals(origin_visit1['origin'], origin_id)
self.assertIsNotNone(origin_visit1['visit'])
self.assertTrue(origin_visit1['visit'] > 0)
actual_origin_visits = list(self.storage.origin_visit_get(origin_id))
self.assertEquals(actual_origin_visits,
[{
'origin': origin_id,
'date': self.date_visit2,
'visit': origin_visit1['visit'],
'status': 'ongoing',
'metadata': None,
}])
@istest
def origin_visit_update(self):
# given
origin_id = self.storage.origin_add_one(self.origin2)
origin_id2 = self.storage.origin_add_one(self.origin)
origin_visit1 = self.storage.origin_visit_add(
origin_id,
ts=self.date_visit2)
origin_visit2 = self.storage.origin_visit_add(
origin_id,
ts=self.date_visit3)
origin_visit3 = self.storage.origin_visit_add(
origin_id2,
ts=self.date_visit3)
# when
visit1_metadata = {
'contents': 42,
'directories': 22,
}
self.storage.origin_visit_update(
origin_id, origin_visit1['visit'], status='full',
metadata=visit1_metadata)
self.storage.origin_visit_update(origin_id2, origin_visit3['visit'],
status='partial')
# then
actual_origin_visits = list(self.storage.origin_visit_get(origin_id))
self.assertEquals(actual_origin_visits,
[{
'origin': origin_visit2['origin'],
'date': self.date_visit2,
'visit': origin_visit1['visit'],
'status': 'full',
'metadata': visit1_metadata,
},
{
'origin': origin_visit2['origin'],
'date': self.date_visit3,
'visit': origin_visit2['visit'],
'status': 'ongoing',
'metadata': None,
}])
+ actual_origin_visits_bis = list(self.storage.origin_visit_get(
+ origin_id, limit=1))
+ self.assertEquals(actual_origin_visits_bis,
+ [{
+ 'origin': origin_visit2['origin'],
+ 'date': self.date_visit2,
+ 'visit': origin_visit1['visit'],
+ 'status': 'full',
+ 'metadata': visit1_metadata,
+ }])
+
+ actual_origin_visits_ter = list(self.storage.origin_visit_get(
+ origin_id, last_visit=origin_visit1['visit']))
+ self.assertEquals(actual_origin_visits_ter,
+ [{
+ 'origin': origin_visit2['origin'],
+ 'date': self.date_visit3,
+ 'visit': origin_visit2['visit'],
+ 'status': 'ongoing',
+ 'metadata': None,
+ }])
+
actual_origin_visits2 = list(self.storage.origin_visit_get(origin_id2))
self.assertEquals(actual_origin_visits2,
[{
'origin': origin_visit3['origin'],
'date': self.date_visit3,
'visit': origin_visit3['visit'],
'status': 'partial',
'metadata': None,
}])
@istest
def origin_visit_get_by(self):
origin_id = self.storage.origin_add_one(self.origin2)
origin_id2 = self.storage.origin_add_one(self.origin)
origin_visit1 = self.storage.origin_visit_add(
origin_id,
ts=self.date_visit2)
occurrence2 = self.occurrence2.copy()
occurrence2.update({
'origin': origin_id,
'visit': origin_visit1['visit'],
})
self.storage.occurrence_add([occurrence2])
# Add some other {origin, visit} entries
self.storage.origin_visit_add(origin_id, ts=self.date_visit3)
self.storage.origin_visit_add(origin_id2, ts=self.date_visit3)
# when
visit1_metadata = {
'contents': 42,
'directories': 22,
}
self.storage.origin_visit_update(
origin_id, origin_visit1['visit'], status='full',
metadata=visit1_metadata)
expected_origin_visit = origin_visit1.copy()
expected_origin_visit.update({
'origin': origin_id,
'visit': origin_visit1['visit'],
'date': self.date_visit2,
'metadata': visit1_metadata,
'status': 'full',
'occurrences': {
occurrence2['branch']: {
'target': occurrence2['target'],
'target_type': occurrence2['target_type'],
}
}
})
# when
actual_origin_visit1 = self.storage.origin_visit_get_by(
origin_visit1['origin'], origin_visit1['visit'])
# then
self.assertEquals(actual_origin_visit1, expected_origin_visit)
@istest
def origin_visit_get_by_no_result(self):
# No result
actual_origin_visit = self.storage.origin_visit_get_by(
10, 999)
self.assertIsNone(actual_origin_visit)
@istest
def occurrence_add(self):
occur = self.occurrence.copy()
origin_id = self.storage.origin_add_one(self.origin2)
date_visit1 = self.date_visit1
origin_visit1 = self.storage.origin_visit_add(origin_id, date_visit1)
revision = self.revision.copy()
revision['id'] = occur['target']
self.storage.revision_add([revision])
occur.update({
'origin': origin_id,
'visit': origin_visit1['visit'],
})
self.storage.occurrence_add([occur])
test_query = '''
with indiv_occurrences as (
select origin, branch, target, target_type, unnest(visits) as visit
from occurrence_history
)
select origin, branch, target, target_type, date
from indiv_occurrences
left join origin_visit using(origin, visit)
order by origin, date'''
self.cursor.execute(test_query)
ret = self.cursor.fetchall()
self.assertEqual(len(ret), 1)
self.assertEqual(
(ret[0][0], ret[0][1].tobytes(), ret[0][2].tobytes(),
ret[0][3], ret[0][4]),
(occur['origin'], occur['branch'], occur['target'],
occur['target_type'], self.date_visit1))
date_visit2 = date_visit1 + datetime.timedelta(hours=10)
origin_visit2 = self.storage.origin_visit_add(origin_id, date_visit2)
occur2 = occur.copy()
occur2.update({
'visit': origin_visit2['visit'],
})
self.storage.occurrence_add([occur2])
self.cursor.execute(test_query)
ret = self.cursor.fetchall()
self.assertEqual(len(ret), 2)
self.assertEqual(
(ret[0][0], ret[0][1].tobytes(), ret[0][2].tobytes(),
ret[0][3], ret[0][4]),
(occur['origin'], occur['branch'], occur['target'],
occur['target_type'], date_visit1))
self.assertEqual(
(ret[1][0], ret[1][1].tobytes(), ret[1][2].tobytes(),
ret[1][3], ret[1][4]),
(occur2['origin'], occur2['branch'], occur2['target'],
occur2['target_type'], date_visit2))
@istest
def occurrence_get(self):
# given
occur = self.occurrence.copy()
origin_id = self.storage.origin_add_one(self.origin2)
origin_visit1 = self.storage.origin_visit_add(origin_id,
self.date_visit1)
revision = self.revision.copy()
revision['id'] = occur['target']
self.storage.revision_add([revision])
occur.update({
'origin': origin_id,
'visit': origin_visit1['visit'],
})
self.storage.occurrence_add([occur])
self.storage.occurrence_add([occur])
# when
actual_occurrence = list(self.storage.occurrence_get(origin_id))
# then
expected_occurrence = self.occurrence.copy()
expected_occurrence.update({
'origin': origin_id
})
self.assertEqual(len(actual_occurrence), 1)
self.assertEqual(actual_occurrence[0], expected_occurrence)
def _trigger_cache_provenance(self, origin_visit):
"""Trigger cache population for cache_content_revision.
"""
ret = list(self.storage.cache_revision_origin_add(
origin_visit['origin'],
origin_visit['visit'],
))
for revision_id in ret:
self.storage.cache_content_revision_add([revision_id])
return ret
@istest
def content_find_provenance_with_present_content(self):
# 1. with something to find
# given
origin_id = self.storage.origin_add_one(self.origin2)
self.storage.content_add([self.cont2])
self.storage.directory_add([self.dir2])  # points to self.cont2
self.storage.revision_add([self.revision3])  # points to self.dir2
occurrence = self.occurrence3.copy()
occurrence['target'] = self.revision3['id']
origin_visit1 = self.storage.origin_visit_add(origin_id,
self.date_visit2)
occurrence.update({
'origin': origin_id,
'visit': origin_visit1['visit'],
})
self.storage.occurrence_add([occurrence])
# Trigger cache population for cache_content_revision
cached_revisions = self._trigger_cache_provenance(origin_visit1)
self.assertIn(self.revision3['id'], cached_revisions)
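# The cache now maps cont2 back to revision3 (through dir2), so a
# provenance lookup on any of cont2's hashes must return this
# (origin, visit, revision, path) tuple.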
# when
occs = list(self.storage.content_find_provenance(
{'sha1': self.cont2['sha1']}))
# then
self.assertEqual(len(occs), 1)
self.assertEqual(occs[0]['origin'], origin_visit1['origin'])
self.assertEqual(occs[0]['visit'], origin_visit1['visit'])
self.assertEqual(occs[0]['revision'], self.revision3['id'])
self.assertEqual(occs[0]['path'], self.dir2['entries'][0]['name'])
occs2 = list(self.storage.content_find_provenance(
{'sha1_git': self.cont2['sha1_git']}))
self.assertEqual(len(occs2), 1)
self.assertEqual(occs2[0]['origin'], origin_visit1['origin'])
self.assertEqual(occs2[0]['visit'], origin_visit1['visit'])
self.assertEqual(occs2[0]['revision'], self.revision3['id'])
self.assertEqual(occs2[0]['path'], self.dir2['entries'][0]['name'])
occs3 = list(self.storage.content_find_provenance(
{'sha256': self.cont2['sha256']}))
self.assertEqual(len(occs3), 1)
self.assertEqual(occs3[0]['origin'], origin_visit1['origin'])
self.assertEqual(occs3[0]['visit'], origin_visit1['visit'])
self.assertEqual(occs3[0]['revision'], self.revision3['id'])
self.assertEqual(occs3[0]['path'], self.dir2['entries'][0]['name'])
@istest
def content_find_provenance_with_non_present_content(self):
# 1. with something that does not exist
missing_cont = self.missing_cont
occ = list(self.storage.content_find_provenance(
{'sha1': missing_cont['sha1']}))
self.assertEqual(occ, [],
"Content does not exist so no occurrence")
# 2. with something that does not exist
occ = list(self.storage.content_find_provenance(
{'sha1_git': missing_cont['sha1_git']}))
self.assertEqual(occ, [],
"Content does not exist so no occurrence")
# 3. with something that does not exist
occ = list(self.storage.content_find_provenance(
{'sha256': missing_cont['sha256']}))
self.assertEqual(occ, [],
"Content does not exist so no occurrence")
@istest
def content_find_provenance_bad_input(self):
# 1. with bad input
with self.assertRaises(ValueError) as cm:
list(self.storage.content_find_provenance({})) # empty is bad
self.assertIn('content keys', cm.exception.args[0])
# 2. with bad input
with self.assertRaises(ValueError) as cm:
list(self.storage.content_find_provenance(
{'unknown-sha1': 'something'})) # not the right key
self.assertIn('content keys', cm.exception.args[0])
@istest
def entity_get_from_lister_metadata(self):
self.storage.entity_add([self.entity1])
fetched_entities = list(
self.storage.entity_get_from_lister_metadata(
[self.entity1_query, self.entity2_query]))
# Entity 1 should have full metadata, with last_seen/last_id instead
# of validity
entity1 = self.entity1.copy()
entity1['last_seen'] = entity1['validity'][0]
del fetched_entities[0]['last_id']
del entity1['validity']
# Entity 2 should have no metadata
entity2 = {
'uuid': None,
'lister_metadata': self.entity2_query.copy(),
}
self.assertEqual(fetched_entities, [entity1, entity2])
@istest
def entity_get_from_lister_metadata_twice(self):
self.storage.entity_add([self.entity1])
fetched_entities1 = list(
self.storage.entity_get_from_lister_metadata(
[self.entity1_query]))
fetched_entities2 = list(
self.storage.entity_get_from_lister_metadata(
[self.entity1_query]))
self.assertEqual(fetched_entities1, fetched_entities2)
@istest
def entity_get(self):
# given
self.storage.entity_add([self.entity4])
self.storage.entity_add([self.entity3])
# when: entity3 -child-of-> entity4
actual_entity3 = list(self.storage.entity_get(self.entity3['uuid']))
self.assertEqual(len(actual_entity3), 2)
# remove dynamic data (modified by db)
entity3 = self.entity3.copy()
entity4 = self.entity4.copy()
del entity3['validity']
del entity4['validity']
del actual_entity3[0]['last_seen']
del actual_entity3[0]['last_id']
del actual_entity3[1]['last_seen']
del actual_entity3[1]['last_id']
self.assertEqual(actual_entity3, [entity3, entity4])
# when: entity4 only child
actual_entity4 = list(self.storage.entity_get(self.entity4['uuid']))
self.assertEqual(len(actual_entity4), 1)
# remove dynamic data (modified by db)
entity4 = self.entity4.copy()
del entity4['validity']
del actual_entity4[0]['last_id']
del actual_entity4[0]['last_seen']
self.assertEqual(actual_entity4, [entity4])
@istest
def entity_get_one(self):
# given
self.storage.entity_add([self.entity3, self.entity4])
# when: entity3 -child-of-> entity4
actual_entity3 = self.storage.entity_get_one(self.entity3['uuid'])
# remove dynamic data (modified by db)
entity3 = self.entity3.copy()
del entity3['validity']
del actual_entity3['last_seen']
del actual_entity3['last_id']
self.assertEqual(actual_entity3, entity3)
@istest
def stat_counters(self):
expected_keys = ['content', 'directory', 'directory_entry_dir',
'occurrence', 'origin', 'person', 'revision']
counters = self.storage.stat_counters()
self.assertTrue(set(expected_keys) <= set(counters))
self.assertIsInstance(counters[expected_keys[0]], int)
@istest
def content_find_with_present_content(self):
# 1. with something to find
cont = self.cont
self.storage.content_add([cont])
actually_present = self.storage.content_find({'sha1': cont['sha1']})
actually_present.pop('ctime')
self.assertEqual(actually_present, {
'sha1': cont['sha1'],
'sha256': cont['sha256'],
'sha1_git': cont['sha1_git'],
'length': cont['length'],
'status': 'visible'
})
# 2. with something to find
actually_present = self.storage.content_find(
{'sha1_git': cont['sha1_git']})
actually_present.pop('ctime')
self.assertEqual(actually_present, {
'sha1': cont['sha1'],
'sha256': cont['sha256'],
'sha1_git': cont['sha1_git'],
'length': cont['length'],
'status': 'visible'
})
# 3. with something to find
actually_present = self.storage.content_find(
{'sha256': cont['sha256']})
actually_present.pop('ctime')
self.assertEqual(actually_present, {
'sha1': cont['sha1'],
'sha256': cont['sha256'],
'sha1_git': cont['sha1_git'],
'length': cont['length'],
'status': 'visible'
})
# 4. with something to find
actually_present = self.storage.content_find(
{'sha1': cont['sha1'],
'sha1_git': cont['sha1_git'],
'sha256': cont['sha256']})
actually_present.pop('ctime')
self.assertEqual(actually_present, {
'sha1': cont['sha1'],
'sha256': cont['sha256'],
'sha1_git': cont['sha1_git'],
'length': cont['length'],
'status': 'visible'
})
@istest
def content_find_with_non_present_content(self):
# 1. with something that does not exist
missing_cont = self.missing_cont
actually_present = self.storage.content_find(
{'sha1': missing_cont['sha1']})
self.assertIsNone(actually_present)
# 2. with something that does not exist
actually_present = self.storage.content_find(
{'sha1_git': missing_cont['sha1_git']})
self.assertIsNone(actually_present)
# 3. with something that does not exist
actually_present = self.storage.content_find(
{'sha256': missing_cont['sha256']})
self.assertIsNone(actually_present)
@istest
def content_find_bad_input(self):
# 1. with bad input
with self.assertRaises(ValueError):
self.storage.content_find({}) # empty is bad
# 2. with bad input
with self.assertRaises(ValueError):
self.storage.content_find(
{'unknown-sha1': 'something'}) # not the right key
@istest
def object_find_by_sha1_git(self):
sha1_gits = [b'00000000000000000000']
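# A syntactically valid 20-byte identifier that matches no stored
# object: it must map to an empty result list.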
expected = {
b'00000000000000000000': [],
}
self.storage.content_add([self.cont])
sha1_gits.append(self.cont['sha1_git'])
expected[self.cont['sha1_git']] = [{
'sha1_git': self.cont['sha1_git'],
'type': 'content',
'id': self.cont['sha1'],
}]
self.storage.directory_add([self.dir])
sha1_gits.append(self.dir['id'])
expected[self.dir['id']] = [{
'sha1_git': self.dir['id'],
'type': 'directory',
'id': self.dir['id'],
}]
self.storage.revision_add([self.revision])
sha1_gits.append(self.revision['id'])
expected[self.revision['id']] = [{
'sha1_git': self.revision['id'],
'type': 'revision',
'id': self.revision['id'],
}]
self.storage.release_add([self.release])
sha1_gits.append(self.release['id'])
expected[self.release['id']] = [{
'sha1_git': self.release['id'],
'type': 'release',
'id': self.release['id'],
}]
ret = self.storage.object_find_by_sha1_git(sha1_gits)
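# object_id is an internal database surrogate key; strip it before
# comparing against the expected payloads.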
for val in ret.values():
for obj in val:
del obj['object_id']
self.assertEqual(expected, ret)
@istest
def content_mimetype_missing(self):
# given
cont2 = self.cont2
self.storage.content_add([cont2])
mimetypes = [
{
'id': self.cont2['sha1'],
'tool_name': 'file',
'tool_version': '5.22',
},
{
'id': self.missing_cont['sha1'],
'tool_name': 'file',
'tool_version': '5.22',
}]
# when
actual_missing = self.storage.content_mimetype_missing(mimetypes)
# then
self.assertEqual(list(actual_missing), [
self.cont2['sha1'],
self.missing_cont['sha1']
])
# given
self.storage.content_mimetype_add([{
'id': self.cont2['sha1'],
'mimetype': b'text/plain',
'encoding': b'utf-8',
'tool_name': 'file',
'tool_version': '5.22',
}])
# when
actual_missing = self.storage.content_mimetype_missing(mimetypes)
# then
self.assertEqual(list(actual_missing), [self.missing_cont['sha1']])
@istest
def content_mimetype_add__drop_duplicate(self):
# given
cont2 = self.cont2
self.storage.content_add([cont2])
mimetype_v1 = {
'id': self.cont2['sha1'],
'mimetype': b'text/plain',
'encoding': b'utf-8',
'tool_name': 'file',
'tool_version': '5.22',
}
# given
self.storage.content_mimetype_add([mimetype_v1])
# when
actual_mimetypes = list(self.storage.content_mimetype_get(
[self.cont2['sha1']]))
# then
expected_mimetypes_v1 = [{
'id': self.cont2['sha1'],
'mimetype': b'text/plain',
'encoding': b'utf-8',
'tool': {
'name': 'file',
'version': '5.22',
}
}]
self.assertEqual(actual_mimetypes, expected_mimetypes_v1)
# given
mimetype_v2 = mimetype_v1.copy()
mimetype_v2.update({
'mimetype': b'text/html',
'encoding': b'us-ascii',
})
self.storage.content_mimetype_add([mimetype_v2])
actual_mimetypes = list(self.storage.content_mimetype_get(
[self.cont2['sha1']]))
# mimetype did not change as the v2 was dropped.
self.assertEqual(actual_mimetypes, expected_mimetypes_v1)
@istest
def content_mimetype_add__update_in_place_duplicate(self):
# given
cont2 = self.cont2
self.storage.content_add([cont2])
mimetype_v1 = {
'id': self.cont2['sha1'],
'mimetype': b'text/plain',
'encoding': b'utf-8',
'tool_name': 'file',
'tool_version': '5.22',
}
# given
self.storage.content_mimetype_add([mimetype_v1])
# when
actual_mimetypes = list(self.storage.content_mimetype_get(
[self.cont2['sha1']]))
expected_mimetypes_v1 = [{
'id': self.cont2['sha1'],
'mimetype': b'text/plain',
'encoding': b'utf-8',
'tool': {
'name': 'file',
'version': '5.22',
}
}]
# then
self.assertEqual(actual_mimetypes, expected_mimetypes_v1)
# given
mimetype_v2 = mimetype_v1.copy()
mimetype_v2.update({
'mimetype': b'text/html',
'encoding': b'us-ascii',
})
self.storage.content_mimetype_add([mimetype_v2], conflict_update=True)
actual_mimetypes = list(self.storage.content_mimetype_get(
[self.cont2['sha1']]))
expected_mimetypes_v2 = [{
'id': self.cont2['sha1'],
'mimetype': b'text/html',
'encoding': b'us-ascii',
'tool': {
'name': 'file',
'version': '5.22',
}
}]
# mimetype did change as the v2 was used to overwrite v1
self.assertEqual(actual_mimetypes, expected_mimetypes_v2)
@istest
def content_mimetype_get(self):
# given
cont2 = self.cont2
self.storage.content_add([cont2])
mimetypes = [self.cont2['sha1'], self.missing_cont['sha1']]
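# ids to look up: one content that will get a mimetype, one missing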
mimetype1 = {
'id': self.cont2['sha1'],
'mimetype': b'text/plain',
'encoding': b'utf-8',
'tool_name': 'file',
'tool_version': '5.22',
}
# when
self.storage.content_mimetype_add([mimetype1])
# then
actual_mimetypes = list(self.storage.content_mimetype_get(mimetypes))
# then
expected_mimetypes = [{
'id': self.cont2['sha1'],
'mimetype': b'text/plain',
'encoding': b'utf-8',
'tool': {
'name': 'file',
'version': '5.22',
}
}]
self.assertEqual(actual_mimetypes, expected_mimetypes)
@istest
def content_language_missing(self):
# given
cont2 = self.cont2
self.storage.content_add([cont2])
languages = [
{
'id': self.cont2['sha1'],
'tool_name': 'pygments',
'tool_version': '2.0.1+dfsg-1.1+deb8u1',
},
{
'id': self.missing_cont['sha1'],
'tool_name': 'pygments',
'tool_version': '2.0.1+dfsg-1.1+deb8u1',
}
]
# when
actual_missing = list(self.storage.content_language_missing(languages))
# then
self.assertEqual(list(actual_missing), [
self.cont2['sha1'],
self.missing_cont['sha1'],
])
# given
self.storage.content_language_add([{
'id': self.cont2['sha1'],
'lang': 'haskell',
'tool_name': 'pygments',
'tool_version': '2.0.1+dfsg-1.1+deb8u1',
}])
# when
actual_missing = list(self.storage.content_language_missing(languages))
# then
self.assertEqual(actual_missing, [self.missing_cont['sha1']])
@istest
def content_language_get(self):
# given
cont2 = self.cont2
self.storage.content_add([cont2])
language1 = {
'id': self.cont2['sha1'],
'lang': 'common-lisp',
'tool_name': 'pygments',
'tool_version': '2.0.1+dfsg-1.1+deb8u1',
}
# when
self.storage.content_language_add([language1])
# then
actual_languages = list(self.storage.content_language_get(
[self.cont2['sha1'], self.missing_cont['sha1']]))
# then
expected_languages = [{
'id': self.cont2['sha1'],
'lang': 'common-lisp',
'tool': {
'name': 'pygments',
'version': '2.0.1+dfsg-1.1+deb8u1',
}
}]
self.assertEqual(actual_languages, expected_languages)
@istest
def content_language_add__drop_duplicate(self):
# given
cont2 = self.cont2
self.storage.content_add([cont2])
language_v1 = {
'id': self.cont2['sha1'],
'lang': 'emacslisp',
'tool_name': 'pygments',
'tool_version': '2.0.1+dfsg-1.1+deb8u1',
}
# given
self.storage.content_language_add([language_v1])
# when
actual_languages = list(self.storage.content_language_get(
[self.cont2['sha1']]))
# then
expected_languages_v1 = [{
'id': self.cont2['sha1'],
'lang': 'emacslisp',
'tool': {
'name': 'pygments',
'version': '2.0.1+dfsg-1.1+deb8u1',
}
}]
self.assertEqual(actual_languages, expected_languages_v1)
# given
language_v2 = language_v1.copy()
language_v2.update({
'lang': 'common-lisp',
})
self.storage.content_language_add([language_v2])
actual_languages = list(self.storage.content_language_get(
[self.cont2['sha1']]))
# language did not change as the v2 was dropped.
self.assertEqual(actual_languages, expected_languages_v1)
@istest
def content_language_add__update_in_place_duplicate(self):
# given
cont2 = self.cont2
self.storage.content_add([cont2])
language_v1 = {
'id': self.cont2['sha1'],
'lang': 'common-lisp',
'tool_name': 'pygments',
'tool_version': '2.0.1+dfsg-1.1+deb8u1',
}
# given
self.storage.content_language_add([language_v1])
# when
actual_languages = list(self.storage.content_language_get(
[self.cont2['sha1']]))
# then
expected_languages_v1 = [{
'id': self.cont2['sha1'],
'lang': 'common-lisp',
'tool': {
'name': 'pygments',
'version': '2.0.1+dfsg-1.1+deb8u1',
}
}]
self.assertEqual(actual_languages, expected_languages_v1)
# given
language_v2 = language_v1.copy()
language_v2.update({
'lang': 'emacslisp',
})
self.storage.content_language_add([language_v2], conflict_update=True)
actual_languages = list(self.storage.content_language_get(
[self.cont2['sha1']]))
expected_languages_v2 = [{
'id': self.cont2['sha1'],
'lang': 'emacslisp',
'tool': {
'name': 'pygments',
'version': '2.0.1+dfsg-1.1+deb8u1',
}
}]
# language did change as the v2 was used to overwrite v1
self.assertEqual(actual_languages, expected_languages_v2)
@istest
def content_ctags_missing(self):
# given
cont2 = self.cont2
self.storage.content_add([cont2])
ctags = [
{
'id': self.cont2['sha1'],
'tool_name': 'universal-ctags',
'tool_version': '~git7859817b',
},
{
'id': self.missing_cont['sha1'],
'tool_name': 'universal-ctags',
'tool_version': '~git7859817b',
}
]
# when
actual_missing = self.storage.content_ctags_missing(ctags)
# then
self.assertEqual(list(actual_missing), [
self.cont2['sha1'],
self.missing_cont['sha1']
])
# given
self.storage.content_ctags_add([
{
'id': self.cont2['sha1'],
'tool_name': 'universal-ctags',
'tool_version': '~git7859817b',
'ctags': [{
'name': 'done',
'kind': 'variable',
'line': 119,
'lang': 'OCaml',
}]
},
])
# when
actual_missing = self.storage.content_ctags_missing(ctags)
# then
self.assertEqual(list(actual_missing), [self.missing_cont['sha1']])
@istest
def content_ctags_get(self):
# given
cont2 = self.cont2
self.storage.content_add([cont2])
ctags = [self.cont2['sha1'], self.missing_cont['sha1']]
ctag1 = {
'id': self.cont2['sha1'],
'tool_name': 'universal-ctags',
'tool_version': '~git7859817b',
'ctags': [
{
'name': 'done',
'kind': 'variable',
'line': 100,
'lang': 'Python',
},
{
'name': 'main',
'kind': 'function',
'line': 119,
'lang': 'Python',
}]
}
# when
self.storage.content_ctags_add([ctag1])
# then
actual_ctags = list(self.storage.content_ctags_get(ctags))
# then
expected_ctags = [
{
'id': self.cont2['sha1'],
'tool': {
'name': 'universal-ctags',
'version': '~git7859817b',
},
'name': 'done',
'kind': 'variable',
'line': 100,
'lang': 'Python',
},
{
'id': self.cont2['sha1'],
'tool': {
'name': 'universal-ctags',
'version': '~git7859817b',
},
'name': 'main',
'kind': 'function',
'line': 119,
'lang': 'Python',
}
]
self.assertEqual(actual_ctags, expected_ctags)
@istest
def content_ctags_search(self):
# 1. given
cont = self.cont
cont2 = self.cont2
self.storage.content_add([cont, cont2])
ctag1 = {
'id': cont['sha1'],
'tool_name': 'universal-ctags',
'tool_version': '~git7859817b',
'ctags': [
{
'name': 'hello',
'kind': 'function',
'line': 133,
'lang': 'Python',
},
{
'name': 'counter',
'kind': 'variable',
'line': 119,
'lang': 'Python',
},
]
}
ctag2 = {
'id': cont2['sha1'],
'tool_name': 'universal-ctags',
'tool_version': '~git7859817b',
'ctags': [
{
'name': 'hello',
'kind': 'variable',
'line': 100,
'lang': 'C',
},
]
}
self.storage.content_ctags_add([ctag1, ctag2])
# 1. when
actual_ctags = list(self.storage.content_ctags_search('hello',
limit=1))
# 1. then
self.assertEqual(actual_ctags, [
{
'id': ctag1['id'],
'tool': {
'name': 'universal-ctags',
'version': '~git7859817b',
},
'name': 'hello',
'kind': 'function',
'line': 133,
'lang': 'Python',
}
])
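# Results are paged by content id: passing last_sha1 resumes the
# search just after the previous page.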
# 2. when
actual_ctags = list(self.storage.content_ctags_search(
'hello',
limit=1,
last_sha1=ctag1['id']))
# 2. then
self.assertEqual(actual_ctags, [
{
'id': ctag2['id'],
'tool': {
'name': 'universal-ctags',
'version': '~git7859817b',
},
'name': 'hello',
'kind': 'variable',
'line': 100,
'lang': 'C',
}
])
# 3. when
actual_ctags = list(self.storage.content_ctags_search('hello'))
# 3. then
self.assertEqual(actual_ctags, [
{
'id': ctag1['id'],
'tool': {
'name': 'universal-ctags',
'version': '~git7859817b',
},
'name': 'hello',
'kind': 'function',
'line': 133,
'lang': 'Python',
},
{
'id': ctag2['id'],
'tool': {
'name': 'universal-ctags',
'version': '~git7859817b',
},
'name': 'hello',
'kind': 'variable',
'line': 100,
'lang': 'C',
},
])
# 4. when
actual_ctags = list(self.storage.content_ctags_search('counter'))
# then
self.assertEqual(actual_ctags, [{
'id': ctag1['id'],
'tool': {
'name': 'universal-ctags',
'version': '~git7859817b',
},
'name': 'counter',
'kind': 'variable',
'line': 119,
'lang': 'Python',
}])
@istest
def content_ctags_search_no_result(self):
actual_ctags = list(self.storage.content_ctags_search('counter'))
self.assertEqual(actual_ctags, [])
@istest
def content_ctags_add__add_new_ctags_added(self):
# given
cont2 = self.cont2
self.storage.content_add([cont2])
ctag_v1 = {
'id': self.cont2['sha1'],
'tool_name': 'universal-ctags',
'tool_version': '~git7859817b',
'ctags': [{
'name': 'done',
'kind': 'variable',
'line': 100,
'lang': 'Scheme',
}]
}
# given
self.storage.content_ctags_add([ctag_v1])
self.storage.content_ctags_add([ctag_v1]) # conflict does nothing
# when
actual_ctags = list(self.storage.content_ctags_get(
[self.cont2['sha1']]))
# then
expected_ctags = [{
'id': self.cont2['sha1'],
'name': 'done',
'kind': 'variable',
'line': 100,
'lang': 'Scheme',
'tool': {
'name': 'universal-ctags',
'version': '~git7859817b',
}
}]
self.assertEqual(actual_ctags, expected_ctags)
# given
ctag_v2 = ctag_v1.copy()
ctag_v2.update({
'ctags': [
{
'name': 'defn',
'kind': 'function',
'line': 120,
'lang': 'Scheme',
}
]
})
self.storage.content_ctags_add([ctag_v2])
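# Without conflict_update, the new ctags are appended to the
# existing ones rather than replacing them.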
expected_ctags = [
{
'id': self.cont2['sha1'],
'name': 'done',
'kind': 'variable',
'line': 100,
'lang': 'Scheme',
'tool': {
'name': 'universal-ctags',
'version': '~git7859817b',
},
}, {
'id': self.cont2['sha1'],
'name': 'defn',
'kind': 'function',
'line': 120,
'lang': 'Scheme',
'tool': {
'name': 'universal-ctags',
'version': '~git7859817b',
},
}
]
actual_ctags = list(self.storage.content_ctags_get(
[self.cont2['sha1']]))
self.assertEqual(actual_ctags, expected_ctags)
@istest
def content_ctags_add__update_in_place(self):
# given
cont2 = self.cont2
self.storage.content_add([cont2])
ctag_v1 = {
'id': self.cont2['sha1'],
'tool_name': 'universal-ctags',
'tool_version': '~git7859817b',
'ctags': [{
'name': 'done',
'kind': 'variable',
'line': 100,
'lang': 'Scheme',
}]
}
# given
self.storage.content_ctags_add([ctag_v1])
# when
actual_ctags = list(self.storage.content_ctags_get(
[self.cont2['sha1']]))
# then
expected_ctags = [
{
'id': self.cont2['sha1'],
'name': 'done',
'kind': 'variable',
'line': 100,
'lang': 'Scheme',
'tool': {
'name': 'universal-ctags',
'version': '~git7859817b',
}
}
]
self.assertEqual(actual_ctags, expected_ctags)
# given
ctag_v2 = ctag_v1.copy()
ctag_v2.update({
'ctags': [
{
'name': 'done',
'kind': 'variable',
'line': 100,
'lang': 'Scheme',
},
{
'name': 'defn',
'kind': 'function',
'line': 120,
'lang': 'Scheme',
}
]
})
self.storage.content_ctags_add([ctag_v2], conflict_update=True)
actual_ctags = list(self.storage.content_ctags_get(
[self.cont2['sha1']]))
# ctag did change as the v2 was used to overwrite v1
expected_ctags = [
{
'id': self.cont2['sha1'],
'name': 'done',
'kind': 'variable',
'line': 100,
'lang': 'Scheme',
'tool': {
'name': 'universal-ctags',
'version': '~git7859817b',
},
},
{
'id': self.cont2['sha1'],
'name': 'defn',
'kind': 'function',
'line': 120,
'lang': 'Scheme',
'tool': {
'name': 'universal-ctags',
'version': '~git7859817b',
},
}
]
self.assertEqual(actual_ctags, expected_ctags)
@istest
def content_fossology_license_missing(self):
# given
cont = self.cont
self.storage.content_add([cont])
licenses = [
{
'id': cont['sha1'],
'tool_name': 'nomos',
'tool_version': '3.1.0rc2-31-ga2cbb8c',
}, {
'id': self.missing_cont['sha1'],
'tool_name': 'nomos',
'tool_version': '3.1.0rc2-31-ga2cbb8c',
}
]
# when
actual_missing = list(self.storage.content_fossology_license_missing(
licenses))
# then
self.assertEqual(actual_missing, [
cont['sha1'],
self.missing_cont['sha1']
])
# given
r = self.storage.content_fossology_license_add([{
'id': cont['sha1'],
'licenses': ['GPL-2.0', 'GPL-2.0+'],
'tool_name': 'nomos',
'tool_version': '3.1.0rc2-31-ga2cbb8c',
}])
self.assertEqual(r, [])
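# content_fossology_license_add returns the entries it could not
# inject; an empty list means every license was known and stored
# (compare with the wrong_license test below).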
# when
actual_missing = list(self.storage.content_fossology_license_missing(
licenses))
# then
self.assertEqual(actual_missing, [self.missing_cont['sha1']])
@istest
def content_fossology_license_get(self):
# given
cont = self.cont
self.storage.content_add([cont])
licenses = [cont['sha1'], self.missing_cont['sha1']]
license1 = {
'id': cont['sha1'],
'licenses': ['GPL-2.0+'],
'tool_name': 'nomos',
'tool_version': '3.1.0rc2-31-ga2cbb8c',
}
# when
r = self.storage.content_fossology_license_add([license1])
self.assertEqual(r, [])
# then
actual_licenses = list(self.storage.content_fossology_license_get(
licenses))
# then
self.assertEqual(actual_licenses, [license1])
@istest
def content_fossology_license_add__wrong_license(self):
# given
cont = self.cont
self.storage.content_add([cont])
license_v1 = {
'id': cont['sha1'],
'licenses': ['blackhole'],
'tool_name': 'nomos',
'tool_version': '3.1.0rc2-31-ga2cbb8c',
}
# given
r = self.storage.content_fossology_license_add([license_v1])
# then
self.assertEqual(r, [license_v1])
# when
actual_licenses = list(self.storage.content_fossology_license_get(
[cont['sha1']]))
# then
self.assertEqual(actual_licenses, [])
@istest
def content_fossology_license_add__new_license_added(self):
# given
cont = self.cont
self.storage.content_add([cont])
license_v1 = {
'id': cont['sha1'],
'licenses': ['Apache-2.0'],
'tool_name': 'nomos',
'tool_version': '3.1.0rc2-31-ga2cbb8c',
}
# given
self.storage.content_fossology_license_add([license_v1])
# conflict does nothing
self.storage.content_fossology_license_add([license_v1])
# when
actual_licenses = list(self.storage.content_fossology_license_get(
[cont['sha1']]))
# then
self.assertEqual(actual_licenses[0], license_v1)
# given
license_v2 = license_v1.copy()
license_v2.update({
'licenses': ['BSD-2-Clause'],
})
self.storage.content_fossology_license_add([license_v2])
actual_licenses = list(self.storage.content_fossology_license_get(
[cont['sha1']]))
expected_license = license_v1.copy()
expected_license.update({
'licenses': ['Apache-2.0', 'BSD-2-Clause'],
})
# without conflict_update, the v2 licenses were appended to v1's
# rather than replacing them
self.assertEqual(actual_licenses[0], expected_license)
@istest
def content_fossology_license_add__update_in_place_duplicate(self):
# given
cont = self.cont
self.storage.content_add([cont])
license_v1 = {
'id': cont['sha1'],
'licenses': ['CECILL'],
'tool_name': 'nomos',
'tool_version': '3.1.0rc2-31-ga2cbb8c',
}
# given
self.storage.content_fossology_license_add([license_v1])
# conflict does nothing
self.storage.content_fossology_license_add([license_v1])
# when
actual_licenses = list(self.storage.content_fossology_license_get(
[cont['sha1']]))
# then
self.assertEqual(actual_licenses[0], license_v1)
# given
license_v2 = license_v1.copy()
license_v2.update({
'licenses': ['CECILL-2.0']
})
self.storage.content_fossology_license_add([license_v2],
conflict_update=True)
actual_licenses = list(self.storage.content_fossology_license_get(
[cont['sha1']]))
# license did change as the v2 was used to overwrite v1
self.assertEqual(actual_licenses[0], license_v2)
class TestStorage(AbstractTestStorage, unittest.TestCase):
"""Test the local storage"""
# Can only be tested with local storage as you can't mock
# datetimes for the remote server
@istest
def fetch_history(self):
origin = self.storage.origin_add_one(self.origin)
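# Freeze datetime.datetime.now so the start and end timestamps of
# the fetch history are deterministic.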
with patch('datetime.datetime'):
datetime.datetime.now.return_value = self.fetch_history_date
fetch_history_id = self.storage.fetch_history_start(origin)
datetime.datetime.now.assert_called_with(tz=datetime.timezone.utc)
with patch('datetime.datetime'):
datetime.datetime.now.return_value = self.fetch_history_end
self.storage.fetch_history_end(fetch_history_id,
self.fetch_history_data)
fetch_history = self.storage.fetch_history_get(fetch_history_id)
expected_fetch_history = self.fetch_history_data.copy()
expected_fetch_history['id'] = fetch_history_id
expected_fetch_history['origin'] = origin
expected_fetch_history['date'] = self.fetch_history_date
expected_fetch_history['duration'] = self.fetch_history_duration
self.assertEqual(expected_fetch_history, fetch_history)
@istest
def person_get(self):
# given
person0 = {
'fullname': b'bob <alice@bob>',
'name': b'bob',
'email': b'alice@bob',
}
id0 = self.storage._person_add(person0)
person1 = {
'fullname': b'tony <tony@bob>',
'name': b'tony',
'email': b'tony@bob',
}
id1 = self.storage._person_add(person1)
# when
actual_persons = self.storage.person_get([id0, id1])
# then (in production, persons are injected as a side effect of
# revision/release insertion; _person_add is a test-only helper)
self.assertEqual(
list(actual_persons), [
{
'id': id0,
'fullname': person0['fullname'],
'name': person0['name'],
'email': person0['email'],
},
{
'id': id1,
'fullname': person1['fullname'],
'name': person1['name'],
'email': person1['email'],
},
])