F9341030
diff --git a/PKG-INFO b/PKG-INFO
index 39ddf44..b3061ff 100644
--- a/PKG-INFO
+++ b/PKG-INFO
@@ -1,10 +1,10 @@
Metadata-Version: 1.0
Name: swh.objstorage
-Version: 0.0.10
+Version: 0.0.11
Summary: Software Heritage Object Storage
Home-page: https://forge.softwareheritage.org/diffusion/DOBJS
Author: Software Heritage developers
Author-email: swh-devel@inria.fr
License: UNKNOWN
Description: UNKNOWN
Platform: UNKNOWN
diff --git a/swh.objstorage.egg-info/PKG-INFO b/swh.objstorage.egg-info/PKG-INFO
index 39ddf44..b3061ff 100644
--- a/swh.objstorage.egg-info/PKG-INFO
+++ b/swh.objstorage.egg-info/PKG-INFO
@@ -1,10 +1,10 @@
Metadata-Version: 1.0
Name: swh.objstorage
-Version: 0.0.10
+Version: 0.0.11
Summary: Software Heritage Object Storage
Home-page: https://forge.softwareheritage.org/diffusion/DOBJS
Author: Software Heritage developers
Author-email: swh-devel@inria.fr
License: UNKNOWN
Description: UNKNOWN
Platform: UNKNOWN
diff --git a/swh/objstorage/__init__.py b/swh/objstorage/__init__.py
index c62fcf1..dc3b765 100644
--- a/swh/objstorage/__init__.py
+++ b/swh/objstorage/__init__.py
@@ -1,75 +1,56 @@
from .objstorage import ObjStorage
from .objstorage_pathslicing import PathSlicingObjStorage
from .api.client import RemoteObjStorage
from .multiplexer import MultiplexerObjStorage
from .multiplexer.filter import add_filters
-__all__ = ['get_objstorage', 'ObjStorage', 'register_objstorages']
+__all__ = ['get_objstorage', 'ObjStorage']
_STORAGE_CLASSES = {
'pathslicing': PathSlicingObjStorage,
'remote': RemoteObjStorage,
}
-
-def register_objstorages(objstorages_map):
- """A function to register new objstorage instances.
-
- This is expected to be called from the client.
-
- Use example:
- from swh.objstorage import register_objstorage, get_objstorage
- from .objstorage_cloud import AwsCloudObjStorage
- from .objstorage_cloud import OpenStackCloudObjStorage
- from .objstorage_azure import AzureCloudObjStorage
-
- objstorage.register_objstorage({
- 'aws-storage': AwsCloudObjStorage,
- 'openstack-storage': OpenStackCloudObjStorage,
- 'azure-storage': AzureCloudObjStorage
- })
-
- # from now on, one can instanciate a new objstorage
- get_objstorage('azure-storage',
- {'storage-account-name': 'account-name'}...)
-
- """
- _STORAGE_CLASSES.update(objstorages_map)
+try:
+ from swh.objstorage.cloud.objstorage_azure import AzureCloudObjStorage
+ _STORAGE_CLASSES['azure-storage'] = AzureCloudObjStorage
+except ImportError:
+ pass
def get_objstorage(cls, args):
""" Create an ObjStorage using the given implementation class.
Args:
cls (str): objstorage class unique key contained in the
_STORAGE_CLASSES dict.
args (dict): arguments for the required class of objstorage
that must match exactly the one in the `__init__` method of the
class.
Returns:
subclass of ObjStorage that match the given `storage_class` argument.
Raises:
ValueError: if the given storage class is not a valid objstorage
key.
"""
try:
return _STORAGE_CLASSES[cls](**args)
except KeyError:
- raise ValueError('Storage class %s does not exists' % cls)
+ raise ValueError('Storage class %s does not exist' % cls)
def _construct_filtered_objstorage(storage_conf, filters_conf):
return add_filters(
get_objstorage(**storage_conf),
filters_conf
)
_STORAGE_CLASSES['filtered'] = _construct_filtered_objstorage
def _construct_multiplexer_objstorage(objstorages):
storages = [get_objstorage(**conf)
for conf in objstorages]
return MultiplexerObjStorage(storages)
_STORAGE_CLASSES['multiplexer'] = _construct_multiplexer_objstorage
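Note, not part of the patch: with this change, clients no longer register extra backends through register_objstorages(); the Azure backend registers itself whenever the azure SDK is importable, and every backend is obtained through the get_objstorage() factory. A minimal usage sketch (paths and credentials below are placeholders):

    from swh.objstorage import get_objstorage

    # Local path-slicing backend, registered under the 'pathslicing' key.
    storage = get_objstorage('pathslicing', {
        'root': '/srv/softwareheritage/objects',   # placeholder path
        'slicing': '0:2/2:4/4:6',
    })

    # Azure backend, only registered when azure.storage.blob imports cleanly.
    azure = get_objstorage('azure-storage', {
        'account_name': 'account-name',            # placeholder credentials
        'api_secret_key': 'api-secret-key',
        'container_name': 'container-name',
    })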
diff --git a/swh/objstorage/api/server.py b/swh/objstorage/api/server.py
index 764471f..85b651e 100644
--- a/swh/objstorage/api/server.py
+++ b/swh/objstorage/api/server.py
@@ -1,107 +1,109 @@
# Copyright (C) 2015 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information
import click
import logging
from flask import Flask, g, request
from swh.core import config
-from swh.objstorage import PathSlicingObjStorage
+from swh.objstorage import get_objstorage
from swh.objstorage.api.common import (BytesRequest, decode_request,
error_handler,
encode_data_server as encode_data)
DEFAULT_CONFIG = {
- 'storage_base': ('str', '/tmp/swh-storage/objects/'),
- 'storage_slicing': ('str', '0:2/2:4/4:6')
+ 'cls': ('str', 'pathslicing'),
+ 'args': ('dict', {
+ 'root': '/srv/softwareheritage/objects',
+ 'slicing': '0:2/2:4/4:6',
+ })
}
app = Flask(__name__)
app.request_class = BytesRequest
@app.errorhandler(Exception)
def my_error_handler(exception):
return error_handler(exception, encode_data)
@app.before_request
def before_request():
- g.objstorage = PathSlicingObjStorage(app.config['storage_base'],
- app.config['storage_slicing'])
+ g.objstorage = get_objstorage(app.config['cls'], app.config['args'])
@app.route('/')
def index():
return "SWH Objstorage API server"
@app.route('/content')
def content():
return str(list(g.storage))
@app.route('/content/contains', methods=['POST'])
def contains():
return encode_data(g.objstorage.__contains__(**decode_request(request)))
@app.route('/content/add', methods=['POST'])
def add_bytes():
return encode_data(g.objstorage.add(**decode_request(request)))
@app.route('/content/get', methods=['POST'])
def get_bytes():
return encode_data(g.objstorage.get(**decode_request(request)))
@app.route('/content/get/batch', methods=['POST'])
def get_batch():
return encode_data(g.objstorage.get_batch(**decode_request(request)))
@app.route('/content/get/random', methods=['POST'])
def get_random_contents():
return encode_data(
g.objstorage.get_random(**decode_request(request))
)
@app.route('/content/check', methods=['POST'])
def check():
return encode_data(g.objstorage.check(**decode_request(request)))
def run_from_webserver(environ, start_response):
"""Run the WSGI app from the webserver, loading the configuration.
"""
config_path = '/etc/softwareheritage/storage/objstorage.ini'
app.config.update(config.read(config_path, DEFAULT_CONFIG))
handler = logging.StreamHandler()
app.logger.addHandler(handler)
return app(environ, start_response)
@click.command()
@click.argument('config-path', required=1)
@click.option('--host', default='0.0.0.0', help="Host to run the server")
@click.option('--port', default=5000, type=click.INT,
help="Binding port of the server")
@click.option('--debug/--nodebug', default=True,
help="Indicates if the server should run in debug mode")
def launch(config_path, host, port, debug):
app.config.update(config.read(config_path, DEFAULT_CONFIG))
app.run(host, port=int(port), debug=bool(debug))
if __name__ == '__main__':
launch()
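Note, not part of the patch: the API server now builds its backend through get_objstorage() from a generic (cls, args) configuration instead of the old storage_base/storage_slicing keys. The diff does not show how swh.core.config lays this out in objstorage.ini, so only the in-memory shape is sketched here:

    config = {
        'cls': 'pathslicing',
        'args': {
            'root': '/srv/softwareheritage/objects',
            'slicing': '0:2/2:4/4:6',
        },
    }

    # Equivalent of what before_request() now does with that configuration:
    from swh.objstorage import get_objstorage
    objstorage = get_objstorage(config['cls'], config['args'])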
diff --git a/swh/objstorage/cloud/objstorage_azure.py b/swh/objstorage/cloud/objstorage_azure.py
index 0c62e31..6175738 100644
--- a/swh/objstorage/cloud/objstorage_azure.py
+++ b/swh/objstorage/cloud/objstorage_azure.py
@@ -1,85 +1,88 @@
# Copyright (C) 2016 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information
import gzip
from swh.core import hashutil
from swh.objstorage.objstorage import ObjStorage, compute_hash
from swh.objstorage.exc import ObjNotFoundError, Error
from azure.storage.blob import BlockBlobService
+from azure.common import AzureMissingResourceHttpError
class AzureCloudObjStorage(ObjStorage):
"""ObjStorage with azure abilities
"""
def __init__(self, account_name, api_secret_key, container_name):
self.block_blob_service = BlockBlobService(
account_name=account_name,
account_key=api_secret_key)
self.container_name = container_name
def __contains__(self, obj_id):
hex_obj_id = hashutil.hash_to_hex(obj_id)
return self.block_blob_service.exists(
container_name=self.container_name,
blob_name=hex_obj_id)
def __iter__(self):
""" Iterate over the objects present in the storage
"""
for obj in self.block_blob_service.list_blobs(self.container_name):
yield obj.name
def __len__(self):
"""Compute the number of objects in the current object storage.
Returns:
number of objects contained in the storage.
"""
return sum(1 for i in self)
def add(self, content, obj_id=None, check_presence=True):
"""Add an obj in storage if it's not there already.
"""
if obj_id is None:
# Checksum is missing, compute it on the fly.
obj_id = compute_hash(content)
if check_presence and obj_id in self:
return obj_id
hex_obj_id = hashutil.hash_to_hex(obj_id)
# Send the gzipped content
self.block_blob_service.create_blob_from_bytes(
container_name=self.container_name,
blob_name=hex_obj_id,
blob=gzip.compress(content))
return obj_id
def restore(self, content, obj_id=None):
return self.add(content, obj_id, check_presence=False)
def get(self, obj_id):
hex_obj_id = hashutil.hash_to_hex(obj_id)
- blob = self.block_blob_service.get_blob_to_bytes(
- container_name=self.container_name,
- blob_name=hex_obj_id)
- if not blob:
+ try:
+ blob = self.block_blob_service.get_blob_to_bytes(
+ container_name=self.container_name,
+ blob_name=hex_obj_id)
+ except AzureMissingResourceHttpError:
raise ObjNotFoundError('Content %s not found!' % hex_obj_id)
+
return gzip.decompress(blob.content)
def check(self, obj_id):
# Check the content integrity
obj_content = self.get(obj_id)
content_obj_id = compute_hash(obj_content)
if content_obj_id != obj_id:
raise Error(obj_id)
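Note, not part of the patch: get() now relies on the Azure SDK raising AzureMissingResourceHttpError for a missing blob, rather than testing the return value for falsiness, and converts it into the package's ObjNotFoundError. A behavioural sketch with placeholder credentials:

    from swh.objstorage.cloud.objstorage_azure import AzureCloudObjStorage
    from swh.objstorage.exc import ObjNotFoundError

    storage = AzureCloudObjStorage('account-name', 'api-secret-key',
                                   'container-name')
    obj_id = storage.add(b'some content')      # content is gzipped before upload
    assert storage.get(obj_id) == b'some content'

    try:
        storage.get(b'\x00' * 20)              # id of an absent object
    except ObjNotFoundError:
        pass                                   # missing blobs surface as ObjNotFoundError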
diff --git a/swh/objstorage/multiplexer/filter/__init__.py b/swh/objstorage/multiplexer/filter/__init__.py
index 9410830..a150bfb 100644
--- a/swh/objstorage/multiplexer/filter/__init__.py
+++ b/swh/objstorage/multiplexer/filter/__init__.py
@@ -1,98 +1,98 @@
-import functools
-
from .read_write_filter import ReadObjStorageFilter
from .id_filter import RegexIdObjStorageFilter, PrefixIdObjStorageFilter
_FILTERS_CLASSES = {
'readonly': ReadObjStorageFilter,
'regex': RegexIdObjStorageFilter,
'prefix': PrefixIdObjStorageFilter
}
_FILTERS_PRIORITY = {
'readonly': 0,
'prefix': 1,
'regex': 2
}
def read_only():
return {'type': 'readonly'}
def id_prefix(prefix):
return {'type': 'prefix', 'prefix': prefix}
def id_regex(regex):
return {'type': 'regex', 'regex': regex}
def _filter_priority(filter_type):
- """ Get the priority of this filter.
+ """Get the priority of this filter.
+
+ Priority is a value that indicates if the operation of the filter
+ is time-consuming (smaller values means quick execution), or very
+ likely to be almost always the same value (False being small, and
+ True high).
- Priority is a value that indicates if the operation of the
- filter is time-consuming (smaller values means quick execution),
- or very likely to be almost always the same value (False being small,
- and True high).
+ In case the filters are chained, they will be ordered in a way
+ that small priorities (quick execution or instantly break the
+ chain) are executed first.
- In case the filters are chained, they will be ordered in a way that
- small priorities (quick execution or instantly break the chain) are
- executed first.
+ Default value is 1. Value 0 is recommended for storages that
+ change behavior only by disabling some operations (making the
+ method return None).
- Default value is 1. Value 0 is recommended for storages that change
- behavior only by disabling some operations (making the method return
- None).
"""
return _FILTERS_PRIORITY.get(filter_type, 1)
def add_filter(storage, filter_conf):
- """ Add a filter to the given storage.
+ """Add a filter to the given storage.
Args:
storage (ObjStorage): storage which will be filtered.
filter_conf (dict): configuration of an ObjStorageFilter, given as
a dictionnary that contains the keys:
- type: which represent the type of filter, one of the keys
- of FILTERS
- - Every arguments that this type of filter require.
+ of _FILTERS_CLASSES
+ - Every arguments that this type of filter requires.
Returns:
A filtered storage that perform only the valid operations.
+
"""
type = filter_conf['type']
- args = {k: v for k, v in filter_conf.items() if k is not 'type'}
- filter = _FILTERS_CLASSES[type](storage=storage, **args)
- return filter
+ args = {k: v for k, v in filter_conf.items() if k != 'type'}
+ filtered_storage = _FILTERS_CLASSES[type](storage=storage, **args)
+ return filtered_storage
def add_filters(storage, filter_confs):
""" Add multiple filters to the given storage.
(See filter.add_filter)
Args:
storage (ObjStorage): storage which will be filtered.
filter_confs (list): any number of filter conf, as a dict with:
- type: which represent the type of filter, one of the keys of
FILTERS.
- Every arguments that this type of filter require.
Returns:
A filtered storage that fulfill the requirement of all the given
filters.
"""
# Reverse sorting in order to put the filter with biggest priority first.
filter_confs.sort(key=lambda conf: _filter_priority(conf['type']),
reverse=True)
# Add the bigest filter to the storage, and reduce it to accumulate filters
# on top of it, until the smallest (fastest, see filter.filter_priority) is
# added.
- return functools.reduce(
- lambda stor, conf: add_filter(stor, conf),
- [storage] + filter_confs
- )
+ for filter_conf in filter_confs:
+ storage = add_filter(storage, filter_conf)
+
+ return storage
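Note, not part of the patch: add_filters() now folds the filter configurations with a plain loop rather than functools.reduce; the behaviour is unchanged. A usage sketch built from the helpers defined in this module (the base storage below is a placeholder):

    from swh.objstorage import get_objstorage
    from swh.objstorage.multiplexer.filter import add_filters, id_prefix, read_only

    base = get_objstorage('pathslicing', {'root': '/srv/objects',   # placeholder
                                          'slicing': '0:2/2:4/4:6'})
    # After the reverse-priority sort, read_only() (priority 0) is wrapped last,
    # so it is the outermost filter and gets evaluated first.
    filtered = add_filters(base, [id_prefix('00'), read_only()])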
diff --git a/swh/objstorage/multiplexer/filter/filter.py b/swh/objstorage/multiplexer/filter/filter.py
index a3e8673..c514eec 100644
--- a/swh/objstorage/multiplexer/filter/filter.py
+++ b/swh/objstorage/multiplexer/filter/filter.py
@@ -1,62 +1,65 @@
# Copyright (C) 2015-2016 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information
from ...objstorage import ObjStorage
class ObjStorageFilter(ObjStorage):
- """ Base implementation of a filter that allow inputs on ObjStorage or not
+ """Base implementation of a filter that allow inputs on ObjStorage or
+ not.
+
+ This class copy the API of ...objstorage in order to filter the
+ inputs of this class.
- This class copy the API of ...objstorage in order to filter the inputs
- of this class.
If the operation is allowed, return the result of this operation
applied to the destination implementation. Otherwise, just return
without any operation.
- This class is an abstract base class for a classic read/write storage.
- Filters can inherit from it and only redefine some methods in order
- to change behavior.
+ This class is an abstract base class for a classic read/write
+ storage. Filters can inherit from it and only redefine some
+ methods in order to change behavior.
+
"""
def __init__(self, storage):
self.storage = storage
def __contains__(self, *args, **kwargs):
return self.storage.__contains__(*args, **kwargs)
def __iter__(self):
""" Iterates over the content of each storages
Warning: The `__iter__` methods frequently have bad performance. You
almost certainly don't want to use this method in production as the
wrapped storage may cause performance issues.
"""
return self.storage.__iter__()
def __len__(self):
""" Compute the number of objects in the current object storage.
Warning: performance issue in `__iter__` also applies here.
Returns:
number of objects contained in the storage.
"""
return self.storage.__len__()
def add(self, content, obj_id=None, check_presence=True, *args, **kwargs):
return self.storage.add(content, obj_id, check_presence,
*args, **kwargs)
def restore(self, content, obj_id=None, *args, **kwargs):
return self.storage.restore(content, obj_id, *args, **kwargs)
def get(self, obj_id, *args, **kwargs):
return self.storage.get(obj_id, *args, **kwargs)
def check(self, obj_id, *args, **kwargs):
return self.storage.check(obj_id, *args, **kwargs)
def get_random(self, batch_size, *args, **kwargs):
return self.storage.get_random(batch_size, *args, **kwargs)
diff --git a/swh/objstorage/multiplexer/multiplexer_objstorage.py b/swh/objstorage/multiplexer/multiplexer_objstorage.py
index 25c7179..ea0a558 100644
--- a/swh/objstorage/multiplexer/multiplexer_objstorage.py
+++ b/swh/objstorage/multiplexer/multiplexer_objstorage.py
@@ -1,154 +1,159 @@
# Copyright (C) 2015-2016 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information
import random
from ..objstorage import ObjStorage
from ..exc import ObjNotFoundError
class MultiplexerObjStorage(ObjStorage):
- """ Implementation of ObjStorage that distribute between multiple storages
+ """Implementation of ObjStorage that distributes between multiple
+ storages.
The multiplexer object storage allows an input to be demultiplexed
- among multiple storages that will or will not accept it by themselves
- (see .filter package).
+ among multiple storages that will or will not accept it by
+ themselves (see .filter package).
- As the ids can be differents, no pre-computed ids should be submitted.
- Also, there are no guarantees that the returned ids can be used directly
- into the storages that the multiplexer manage.
+ As the ids can be differents, no pre-computed ids should be
+ submitted. Also, there are no guarantees that the returned ids
+ can be used directly into the storages that the multiplexer
+ manage.
Use case examples could be:
Example 1:
storage_v1 = filter.read_only(PathSlicingObjStorage('/dir1',
'0:2/2:4/4:6'))
storage_v2 = PathSlicingObjStorage('/dir2', '0:1/0:5')
storage = MultiplexerObjStorage([storage_v1, storage_v2])
- When using 'storage', all the new contents will only be added to the v2
- storage, while it will be retrievable from both.
+ When using 'storage', all the new contents will only be added
+ to the v2 storage, while it will be retrievable from both.
Example 2:
storage_v1 = filter.id_regex(
PathSlicingObjStorage('/dir1', '0:2/2:4/4:6'),
r'[^012].*'
)
storage_v2 = filter.if_regex(
PathSlicingObjStorage('/dir2', '0:1/0:5'),
r'[012]/*'
)
storage = MultiplexerObjStorage([storage_v1, storage_v2])
- When using this storage, the contents with a sha1 starting with 0, 1 or
- 2 will be redirected (read AND write) to the storage_v2, while the
- others will be redirected to the storage_v1.
- If a content starting with 0, 1 or 2 is present in the storage_v1, it
- would be ignored anyway.
+ When using this storage, the contents with a sha1 starting
+ with 0, 1 or 2 will be redirected (read AND write) to the
+ storage_v2, while the others will be redirected to the
+ storage_v1. If a content starting with 0, 1 or 2 is present
+ in the storage_v1, it would be ignored anyway.
+
"""
def __init__(self, storages):
self.storages = storages
def __contains__(self, obj_id):
for storage in self.storages:
if obj_id in storage:
return True
return False
def __iter__(self):
- """ Iterates over the content of each storages
+ """Iterates over the content of each storages
Due to the demultiplexer nature, same content can be in multiple
storages and may be yielded multiple times.
Warning: The `__iter__` methods frequently have bad performance. You
almost certainly don't want to use this method in production.
+
"""
for storage in self.storages:
yield from storage
def __len__(self):
- """ Compute the number of objects in the current object storage.
+ """Compute the number of objects in the current object storage.
Identical objects present in multiple storages will be counted as
multiple objects.
Warning: this currently uses `__iter__`, its warning about bad
performance applies.
Returns:
number of objects contained in the storage.
+
"""
return sum(map(len, self.storages))
def add(self, content, obj_id=None, check_presence=True):
""" Add a new object to the object storage.
If the adding step works in all the storages that accept this content,
this is a success. Otherwise, the full adding step is an error even if
it succeed in some of the storages.
Args:
content: content of the object to be added to the storage.
obj_id: checksum of [bytes] using [ID_HASH_ALGO] algorithm. When
given, obj_id will be trusted to match the bytes. If missing,
obj_id will be computed on the fly.
check_presence: indicate if the presence of the content should be
verified before adding the file.
Returns:
an id of the object into the storage. As the write-storages are
always readable as well, any id will be valid to retrieve a
content.
"""
return [storage.add(content, obj_id, check_presence)
for storage in self.storages].pop()
def restore(self, content, obj_id=None):
return [storage.restore(content, obj_id)
for storage in self.storages].pop()
def get(self, obj_id):
for storage in self.storages:
try:
return storage.get(obj_id)
except ObjNotFoundError:
continue
# If no storage contains this content, raise the error
raise ObjNotFoundError(obj_id)
def check(self, obj_id):
nb_present = 0
for storage in self.storages:
try:
storage.check(obj_id)
except ObjNotFoundError:
continue
else:
nb_present += 1
# If there is an Error because of a corrupted file, then let it pass.
# Raise the ObjNotFoundError only if the content coulnd't be found in
# all the storages.
if nb_present == 0:
raise ObjNotFoundError(obj_id)
def get_random(self, batch_size):
storages_set = [storage for storage in self.storages
if len(storage) > 0]
if len(storages_set) <= 0:
return []
while storages_set:
storage = random.choice(storages_set)
try:
return storage.get_random(batch_size)
except NotImplementedError:
storages_set.remove(storage)
# There is no storage that allow the get_random operation
raise NotImplementedError(
"There is no storage implementation into the multiplexer that "
"support the 'get_random' operation"
)
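Note, not part of the patch: the multiplexer is also reachable through the factory, since __init__.py registers the 'multiplexer' and 'filtered' keys. A configuration sketch mirroring Example 1 of the docstring above (a read-only v1 storage plus a writable v2 storage; paths are placeholders):

    from swh.objstorage import get_objstorage

    storage = get_objstorage('multiplexer', {'objstorages': [
        {'cls': 'filtered', 'args': {
            'storage_conf': {'cls': 'pathslicing',
                             'args': {'root': '/dir1', 'slicing': '0:2/2:4/4:6'}},
            'filters_conf': [{'type': 'readonly'}],
        }},
        {'cls': 'pathslicing', 'args': {'root': '/dir2', 'slicing': '0:1/0:5'}},
    ]})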
diff --git a/swh/objstorage/tests/test_objstorage_api.py b/swh/objstorage/tests/test_objstorage_api.py
index 6d23451..1d53b91 100644
--- a/swh/objstorage/tests/test_objstorage_api.py
+++ b/swh/objstorage/tests/test_objstorage_api.py
@@ -1,28 +1,36 @@
# Copyright (C) 2015 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information
import tempfile
import unittest
from nose.plugins.attrib import attr
from swh.objstorage import get_objstorage
from swh.objstorage.tests.objstorage_testing import ObjStorageTestFixture
from swh.objstorage.tests.server_testing import ServerTestFixture
from swh.objstorage.api.server import app
@attr('db')
class TestRemoteObjStorage(ServerTestFixture, ObjStorageTestFixture,
unittest.TestCase):
""" Test the remote archive API.
"""
def setUp(self):
- self.config = {'storage_base': tempfile.mkdtemp(),
- 'storage_slicing': '0:1/0:5'}
+ self.config = {
+ 'cls': 'pathslicing',
+ 'args': {
+ 'root': tempfile.mkdtemp(),
+ 'slicing': '0:1/0:5',
+ }
+ }
+
self.app = app
super().setUp()
- self.storage = get_objstorage('remote', {'base_url': self.url()})
+ self.storage = get_objstorage('remote', {
+ 'base_url': self.url()
+ })
diff --git a/swh/objstorage/tests/test_objstorage_azure.py b/swh/objstorage/tests/test_objstorage_azure.py
index cfd2b56..afbe955 100644
--- a/swh/objstorage/tests/test_objstorage_azure.py
+++ b/swh/objstorage/tests/test_objstorage_azure.py
@@ -1,60 +1,64 @@
# Copyright (C) 2016 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information
import unittest
+from azure.common import AzureMissingResourceHttpError
+
from swh.objstorage.cloud.objstorage_azure import AzureCloudObjStorage
from objstorage_testing import ObjStorageTestFixture
class MockBlob():
""" Libcloud object mock that replicates its API """
def __init__(self, name, content):
self.name = name
self.content = content
class MockBlockBlobService():
"""Mock internal azure library which AzureCloudObjStorage depends upon.
"""
data = {}
def __init__(self, account_name, api_secret_key, container_name):
# do not care for the account_name and the api_secret_key here
self.data[container_name] = {}
def create_blob_from_bytes(self, container_name, blob_name, blob):
self.data[container_name][blob_name] = blob
def get_blob_to_bytes(self, container_name, blob_name):
if blob_name not in self.data[container_name]:
- return None
+ raise AzureMissingResourceHttpError(
+ 'Blob %s not found' % blob_name,
+ 404)
return MockBlob(name=blob_name,
content=self.data[container_name][blob_name])
def exists(self, container_name, blob_name):
return blob_name in self.data[container_name]
def list_blobs(self, container_name):
for blob_name, content in self.data[container_name].items():
yield MockBlob(name=blob_name, content=content)
class MockAzureCloudObjStorage(AzureCloudObjStorage):
""" Cloud object storage that uses a mocked driver """
def __init__(self, api_key, api_secret_key, container_name):
self.container_name = container_name
self.block_blob_service = MockBlockBlobService(api_key, api_secret_key,
container_name)
class TestAzureCloudObjStorage(ObjStorageTestFixture, unittest.TestCase):
def setUp(self):
super().setUp()
self.storage = MockAzureCloudObjStorage(
'account-name', 'api-secret-key', 'container-name')
diff --git a/version.txt b/version.txt
index a697ec8..d800817 100644
--- a/version.txt
+++ b/version.txt
@@ -1 +1 @@
-v0.0.10-0-g22b7a3f
\ No newline at end of file
+v0.0.11-0-g92f67a9
\ No newline at end of file