diff --git a/dulwich/contrib/test_swift.py b/dulwich/contrib/test_swift.py index 889f8880..bd0a3569 100644 --- a/dulwich/contrib/test_swift.py +++ b/dulwich/contrib/test_swift.py @@ -1,647 +1,649 @@ # test_swift.py -- Unittests for the Swift backend. # Copyright (C) 2013 eNovance SAS # # Author: Fabien Boucher # # This program is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License # as published by the Free Software Foundation; version 2 # of the License or (at your option) any later version of # the License. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, # MA 02110-1301, USA. """Tests for dulwich.contrib.swift.""" import posixpath from time import time from io import BytesIO try: from StringIO import StringIO except ImportError: from io import StringIO import sys from unittest import skipIf from dulwich.tests import ( TestCase, ) from dulwich.tests.test_object_store import ( ObjectStoreTests, ) from dulwich.tests.utils import ( build_pack, ) from dulwich.objects import ( Blob, Commit, Tree, Tag, parse_timezone, ) from dulwich.pack import ( REF_DELTA, write_pack_index_v2, PackData, load_pack_index_file, ) try: from simplejson import dumps as json_dumps except ImportError: from json import dumps as json_dumps missing_libs = [] try: import gevent except ImportError: missing_libs.append("gevent") try: import geventhttpclient except ImportError: missing_libs.append("geventhttpclient") try: from mock import patch except ImportError: missing_libs.append("mock") skipmsg = "Required libraries are not installed (%r)" % missing_libs skipIfPY3 = skipIf(sys.version_info[0] == 3, "SWIFT module not yet ported to python3.") if not missing_libs: from dulwich.contrib import swift config_file = """[swift] auth_url = http://127.0.0.1:8080/auth/%(version_str)s auth_ver = %(version_int)s username = test;tester password = testing region_name = %(region_name)s endpoint_type = %(endpoint_type)s concurrency = %(concurrency)s chunk_length = %(chunk_length)s cache_length = %(cache_length)s http_pool_length = %(http_pool_length)s http_timeout = %(http_timeout)s """ def_config_file = {'version_str': 'v1.0', 'version_int': 1, 'concurrency': 1, 'chunk_length': 12228, 'cache_length': 1, 'region_name': 'test', 'endpoint_type': 'internalURL', 'http_pool_length': 1, 'http_timeout': 1} def create_swift_connector(store={}): return lambda root, conf: FakeSwiftConnector(root, conf=conf, store=store) class Response(object): def __init__(self, headers={}, status=200, content=None): self.headers = headers self.status_code = status self.content = content def __getitem__(self, key): return self.headers[key] def items(self): return self.headers.items() def read(self): return self.content def fake_auth_request_v1(*args, **kwargs): ret = Response({'X-Storage-Url': 'http://127.0.0.1:8080/v1.0/AUTH_fakeuser', 'X-Auth-Token': '12' * 10}, 200) return ret def fake_auth_request_v1_error(*args, **kwargs): ret = Response({}, 401) return ret def fake_auth_request_v2(*args, **kwargs): s_url = 'http://127.0.0.1:8080/v1.0/AUTH_fakeuser' resp = {'access': {'token': {'id': '12' * 10}, 'serviceCatalog': [ {'type': 
'object-store', 'endpoints': [{'region': 'test', 'internalURL': s_url, }, ] }, ] } } ret = Response(status=200, content=json_dumps(resp)) return ret def create_commit(data, marker=b'Default', blob=None): if not blob: blob = Blob.from_string(b'The blob content ' + marker) tree = Tree() tree.add(b"thefile_" + marker, 0o100644, blob.id) cmt = Commit() if data: assert isinstance(data[-1], Commit) cmt.parents = [data[-1].id] cmt.tree = tree.id author = b"John Doe " + marker + b" " cmt.author = cmt.committer = author tz = parse_timezone(b'-0200')[0] cmt.commit_time = cmt.author_time = int(time()) cmt.commit_timezone = cmt.author_timezone = tz cmt.encoding = b"UTF-8" cmt.message = b"The commit message " + marker tag = Tag() tag.tagger = b"john@doe.net" tag.message = b"Annotated tag" tag.tag_timezone = parse_timezone(b'-0200')[0] tag.tag_time = cmt.author_time tag.object = (Commit, cmt.id) tag.name = b"v_" + marker + b"_0.1" return blob, tree, tag, cmt def create_commits(length=1, marker=b'Default'): data = [] for i in range(0, length): _marker = ("%s_%s" % (marker, i)).encode() blob, tree, tag, cmt = create_commit(data, _marker) data.extend([blob, tree, tag, cmt]) return data @skipIf(missing_libs, skipmsg) class FakeSwiftConnector(object): def __init__(self, root, conf, store=None): if store: self.store = store else: self.store = {} self.conf = conf self.root = root self.concurrency = 1 self.chunk_length = 12228 self.cache_length = 1 def put_object(self, name, content): name = posixpath.join(self.root, name) if hasattr(content, 'seek'): content.seek(0) content = content.read() self.store[name] = content def get_object(self, name, range=None): name = posixpath.join(self.root, name) if not range: try: return BytesIO(self.store[name]) except KeyError: return None else: l, r = range.split('-') try: if not l: r = -int(r) return self.store[name][r:] else: return self.store[name][int(l):int(r)] except KeyError: return None def get_container_objects(self): return [{'name': k.replace(self.root + '/', '')} for k in self.store] def create_root(self): if self.root in self.store.keys(): pass else: self.store[self.root] = '' def get_object_stat(self, name): name = posixpath.join(self.root, name) if not name in self.store: return None return {'content-length': len(self.store[name])} @skipIf(missing_libs, skipmsg) @skipIfPY3 class TestSwiftObjectStore(TestCase): def setUp(self): super(TestSwiftObjectStore, self).setUp() self.conf = swift.load_conf(file=StringIO(config_file % def_config_file)) self.fsc = FakeSwiftConnector('fakerepo', conf=self.conf) def _put_pack(self, sos, commit_amount=1, marker='Default'): odata = create_commits(length=commit_amount, marker=marker) data = [(d.type_num, d.as_raw_string()) for d in odata] f = BytesIO() build_pack(f, data, store=sos) sos.add_thin_pack(f.read, None) return odata def test_load_packs(self): store = {'fakerepo/objects/pack/pack-'+'1'*40+'.idx': '', 'fakerepo/objects/pack/pack-'+'1'*40+'.pack': '', 'fakerepo/objects/pack/pack-'+'1'*40+'.info': '', 'fakerepo/objects/pack/pack-'+'2'*40+'.idx': '', 'fakerepo/objects/pack/pack-'+'2'*40+'.pack': '', 'fakerepo/objects/pack/pack-'+'2'*40+'.info': ''} fsc = FakeSwiftConnector('fakerepo', conf=self.conf, store=store) sos = swift.SwiftObjectStore(fsc) packs = sos._load_packs() self.assertEqual(len(packs), 2) for pack in packs: self.assertTrue(isinstance(pack, swift.SwiftPack)) def test_add_thin_pack(self): sos = swift.SwiftObjectStore(self.fsc) self._put_pack(sos, 1, 'Default') self.assertEqual(len(self.fsc.store), 3) def 
test_find_missing_objects(self): commit_amount = 3 sos = swift.SwiftObjectStore(self.fsc) odata = self._put_pack(sos, commit_amount, 'Default') head = odata[-1].id i = sos.iter_shas(sos.find_missing_objects([], [head, ], progress=None, get_tagged=None)) self.assertEqual(len(i), commit_amount * 3) shas = [d.id for d in odata] for sha, path in i: self.assertIn(sha.id, shas) def test_find_missing_objects_with_tag(self): commit_amount = 3 sos = swift.SwiftObjectStore(self.fsc) odata = self._put_pack(sos, commit_amount, 'Default') head = odata[-1].id peeled_sha = dict([(sha.object[1], sha.id) for sha in odata if isinstance(sha, Tag)]) get_tagged = lambda: peeled_sha i = sos.iter_shas(sos.find_missing_objects([], [head, ], progress=None, get_tagged=get_tagged)) self.assertEqual(len(i), commit_amount * 4) shas = [d.id for d in odata] for sha, path in i: self.assertIn(sha.id, shas) def test_find_missing_objects_with_common(self): commit_amount = 3 sos = swift.SwiftObjectStore(self.fsc) odata = self._put_pack(sos, commit_amount, 'Default') head = odata[-1].id have = odata[7].id i = sos.iter_shas(sos.find_missing_objects([have, ], [head, ], progress=None, get_tagged=None)) self.assertEqual(len(i), 3) def test_find_missing_objects_multiple_packs(self): sos = swift.SwiftObjectStore(self.fsc) commit_amount_a = 3 odataa = self._put_pack(sos, commit_amount_a, 'Default1') heada = odataa[-1].id commit_amount_b = 2 odatab = self._put_pack(sos, commit_amount_b, 'Default2') headb = odatab[-1].id i = sos.iter_shas(sos.find_missing_objects([], [heada, headb], progress=None, get_tagged=None)) self.assertEqual(len(self.fsc.store), 6) self.assertEqual(len(i), commit_amount_a * 3 + commit_amount_b * 3) shas = [d.id for d in odataa] shas.extend([d.id for d in odatab]) for sha, path in i: self.assertIn(sha.id, shas) def test_add_thin_pack_ext_ref(self): sos = swift.SwiftObjectStore(self.fsc) odata = self._put_pack(sos, 1, 'Default1') ref_blob_content = odata[0].as_raw_string() ref_blob_id = odata[0].id new_blob = Blob.from_string(ref_blob_content.replace('blob', 'yummy blob')) blob, tree, tag, cmt = \ create_commit([], marker='Default2', blob=new_blob) data = [(REF_DELTA, (ref_blob_id, blob.as_raw_string())), (tree.type_num, tree.as_raw_string()), (cmt.type_num, cmt.as_raw_string()), (tag.type_num, tag.as_raw_string())] f = BytesIO() build_pack(f, data, store=sos) sos.add_thin_pack(f.read, None) self.assertEqual(len(self.fsc.store), 6) @skipIf(missing_libs, skipmsg) class TestSwiftRepo(TestCase): def setUp(self): super(TestSwiftRepo, self).setUp() self.conf = swift.load_conf(file=StringIO(config_file % def_config_file)) def test_init(self): store = {'fakerepo/objects/pack': ''} with patch('dulwich.contrib.swift.SwiftConnector', new_callable=create_swift_connector, store=store): swift.SwiftRepo('fakerepo', conf=self.conf) def test_init_no_data(self): with patch('dulwich.contrib.swift.SwiftConnector', new_callable=create_swift_connector): self.assertRaises(Exception, swift.SwiftRepo, 'fakerepo', self.conf) def test_init_bad_data(self): store = {'fakerepo/.git/objects/pack': ''} with patch('dulwich.contrib.swift.SwiftConnector', new_callable=create_swift_connector, store=store): self.assertRaises(Exception, swift.SwiftRepo, 'fakerepo', self.conf) def test_put_named_file(self): store = {'fakerepo/objects/pack': ''} with patch('dulwich.contrib.swift.SwiftConnector', new_callable=create_swift_connector, store=store): repo = swift.SwiftRepo('fakerepo', conf=self.conf) desc = b'Fake repo' repo._put_named_file('description', 
                                 desc)
            self.assertEqual(repo.scon.store['fakerepo/description'],
                             desc)

    def test_init_bare(self):
        fsc = FakeSwiftConnector('fakeroot', conf=self.conf)
        with patch('dulwich.contrib.swift.SwiftConnector',
                   new_callable=create_swift_connector,
                   store=fsc.store):
            swift.SwiftRepo.init_bare(fsc, conf=self.conf)
        self.assertIn('fakeroot/objects/pack', fsc.store)
        self.assertIn('fakeroot/info/refs', fsc.store)
        self.assertIn('fakeroot/description', fsc.store)


@skipIf(missing_libs, skipmsg)
@skipIfPY3
class TestPackInfoLoadDump(TestCase):

+    def setUp(self):
+        super(TestPackInfoLoadDump, self).setUp()
        conf = swift.load_conf(file=StringIO(config_file %
                                             def_config_file))
        sos = swift.SwiftObjectStore(
            FakeSwiftConnector('fakerepo', conf=conf))
        commit_amount = 10
        self.commits = create_commits(length=commit_amount, marker="m")
        data = [(d.type_num, d.as_raw_string()) for d in self.commits]
        f = BytesIO()
        fi = BytesIO()
        expected = build_pack(f, data, store=sos)
        entries = [(sha, ofs, checksum) for
                   ofs, _, _, sha, checksum in expected]
        self.pack_data = PackData.from_file(file=f, size=None)
        write_pack_index_v2(
            fi, entries, self.pack_data.calculate_checksum())
        fi.seek(0)
        self.pack_index = load_pack_index_file('', fi)

    # def test_pack_info_perf(self):
    #     dump_time = []
    #     load_time = []
    #     for i in range(0, 100):
    #         start = time()
    #         dumps = swift.pack_info_create(self.pack_data, self.pack_index)
    #         dump_time.append(time() - start)
    #     for i in range(0, 100):
    #         start = time()
    #         pack_infos = swift.load_pack_info('', file=BytesIO(dumps))
    #         load_time.append(time() - start)
    #     print sum(dump_time) / float(len(dump_time))
    #     print sum(load_time) / float(len(load_time))

    def test_pack_info(self):
        dumps = swift.pack_info_create(self.pack_data, self.pack_index)
        pack_infos = swift.load_pack_info('', file=BytesIO(dumps))
        for obj in self.commits:
            self.assertIn(obj.id, pack_infos)


@skipIf(missing_libs, skipmsg)
class TestSwiftInfoRefsContainer(TestCase):

    def setUp(self):
        super(TestSwiftInfoRefsContainer, self).setUp()
        content = \
            b"22effb216e3a82f97da599b8885a6cadb488b4c5\trefs/heads/master\n" + \
            b"cca703b0e1399008b53a1a236d6b4584737649e4\trefs/heads/dev"
        self.store = {'fakerepo/info/refs': content}
        self.conf = swift.load_conf(file=StringIO(config_file %
                                                  def_config_file))
        self.fsc = FakeSwiftConnector('fakerepo', conf=self.conf)
        self.object_store = {}

    def test_init(self):
        """info/refs does not exists"""
        irc = swift.SwiftInfoRefsContainer(self.fsc, self.object_store)
        self.assertEqual(len(irc._refs), 0)
        self.fsc.store = self.store
        irc = swift.SwiftInfoRefsContainer(self.fsc, self.object_store)
        self.assertIn(b'refs/heads/dev', irc.allkeys())
        self.assertIn(b'refs/heads/master', irc.allkeys())

    def test_set_if_equals(self):
        self.fsc.store = self.store
        irc = swift.SwiftInfoRefsContainer(self.fsc, self.object_store)
        irc.set_if_equals(b'refs/heads/dev',
                          b"cca703b0e1399008b53a1a236d6b4584737649e4",
                          b'1'*40)
        self.assertEqual(irc[b'refs/heads/dev'], b'1'*40)

    def test_remove_if_equals(self):
        self.fsc.store = self.store
        irc = swift.SwiftInfoRefsContainer(self.fsc, self.object_store)
        irc.remove_if_equals(b'refs/heads/dev',
                             b"cca703b0e1399008b53a1a236d6b4584737649e4")
        self.assertNotIn(b'refs/heads/dev', irc.allkeys())


@skipIf(missing_libs, skipmsg)
class TestSwiftConnector(TestCase):

    def setUp(self):
        super(TestSwiftConnector, self).setUp()
        self.conf = swift.load_conf(file=StringIO(config_file %
                                                  def_config_file))
        with patch('geventhttpclient.HTTPClient.request',
                   fake_auth_request_v1):
            self.conn = swift.SwiftConnector('fakerepo', conf=self.conf)

    def
test_init_connector(self): self.assertEqual(self.conn.auth_ver, '1') self.assertEqual(self.conn.auth_url, 'http://127.0.0.1:8080/auth/v1.0') self.assertEqual(self.conn.user, 'test:tester') self.assertEqual(self.conn.password, 'testing') self.assertEqual(self.conn.root, 'fakerepo') self.assertEqual(self.conn.storage_url, 'http://127.0.0.1:8080/v1.0/AUTH_fakeuser') self.assertEqual(self.conn.token, '12' * 10) self.assertEqual(self.conn.http_timeout, 1) self.assertEqual(self.conn.http_pool_length, 1) self.assertEqual(self.conn.concurrency, 1) self.conf.set('swift', 'auth_ver', '2') self.conf.set('swift', 'auth_url', 'http://127.0.0.1:8080/auth/v2.0') with patch('geventhttpclient.HTTPClient.request', fake_auth_request_v2): conn = swift.SwiftConnector('fakerepo', conf=self.conf) self.assertEqual(conn.user, 'tester') self.assertEqual(conn.tenant, 'test') self.conf.set('swift', 'auth_ver', '1') self.conf.set('swift', 'auth_url', 'http://127.0.0.1:8080/auth/v1.0') with patch('geventhttpclient.HTTPClient.request', fake_auth_request_v1_error): self.assertRaises(swift.SwiftException, lambda: swift.SwiftConnector('fakerepo', conf=self.conf)) def test_root_exists(self): with patch('geventhttpclient.HTTPClient.request', lambda *args: Response()): self.assertEqual(self.conn.test_root_exists(), True) def test_root_not_exists(self): with patch('geventhttpclient.HTTPClient.request', lambda *args: Response(status=404)): self.assertEqual(self.conn.test_root_exists(), None) def test_create_root(self): with patch('dulwich.contrib.swift.SwiftConnector.test_root_exists', lambda *args: None): with patch('geventhttpclient.HTTPClient.request', lambda *args: Response()): self.assertEqual(self.conn.create_root(), None) def test_create_root_fails(self): with patch('dulwich.contrib.swift.SwiftConnector.test_root_exists', lambda *args: None): with patch('geventhttpclient.HTTPClient.request', lambda *args: Response(status=404)): self.assertRaises(swift.SwiftException, lambda: self.conn.create_root()) def test_get_container_objects(self): with patch('geventhttpclient.HTTPClient.request', lambda *args: Response(content=json_dumps( (({'name': 'a'}, {'name': 'b'}))))): self.assertEqual(len(self.conn.get_container_objects()), 2) def test_get_container_objects_fails(self): with patch('geventhttpclient.HTTPClient.request', lambda *args: Response(status=404)): self.assertEqual(self.conn.get_container_objects(), None) def test_get_object_stat(self): with patch('geventhttpclient.HTTPClient.request', lambda *args: Response(headers={'content-length': '10'})): self.assertEqual(self.conn.get_object_stat('a')['content-length'], '10') def test_get_object_stat_fails(self): with patch('geventhttpclient.HTTPClient.request', lambda *args: Response(status=404)): self.assertEqual(self.conn.get_object_stat('a'), None) def test_put_object(self): with patch('geventhttpclient.HTTPClient.request', lambda *args, **kwargs: Response()): self.assertEqual(self.conn.put_object('a', BytesIO(b'content')), None) def test_put_object_fails(self): with patch('geventhttpclient.HTTPClient.request', lambda *args, **kwargs: Response(status=400)): self.assertRaises(swift.SwiftException, lambda: self.conn.put_object( 'a', BytesIO(b'content'))) def test_get_object(self): with patch('geventhttpclient.HTTPClient.request', lambda *args, **kwargs: Response(content=b'content')): self.assertEqual(self.conn.get_object('a').read(), b'content') with patch('geventhttpclient.HTTPClient.request', lambda *args, **kwargs: Response(content=b'content')): 
            self.assertEqual(self.conn.get_object('a', range='0-6'),
                             b'content')

    def test_get_object_fails(self):
        with patch('geventhttpclient.HTTPClient.request',
                   lambda *args, **kwargs: Response(status=404)):
            self.assertEqual(self.conn.get_object('a'), None)

    def test_del_object(self):
        with patch('geventhttpclient.HTTPClient.request',
                   lambda *args: Response()):
            self.assertEqual(self.conn.del_object('a'), None)

    def test_del_root(self):
        with patch('dulwich.contrib.swift.SwiftConnector.del_object',
                   lambda *args: None):
            with patch('dulwich.contrib.swift.SwiftConnector.'
                       'get_container_objects',
                       lambda *args: ({'name': 'a'}, {'name': 'b'})):
                with patch('geventhttpclient.HTTPClient.request',
                           lambda *args: Response()):
                    self.assertEqual(self.conn.del_root(), None)


@skipIf(missing_libs, skipmsg)
class SwiftObjectStoreTests(ObjectStoreTests, TestCase):

    def setUp(self):
        TestCase.setUp(self)
        conf = swift.load_conf(file=StringIO(config_file %
                                             def_config_file))
        fsc = FakeSwiftConnector('fakerepo', conf=conf)
        self.store = swift.SwiftObjectStore(fsc)
diff --git a/dulwich/tests/__init__.py b/dulwich/tests/__init__.py
index 66caf44f..8a892691 100644
--- a/dulwich/tests/__init__.py
+++ b/dulwich/tests/__init__.py
@@ -1,157 +1,172 @@
# __init__.py -- The tests for dulwich
# Copyright (C) 2007 James Westby
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; version 2
# of the License or (at your option) any later version of
# the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.

"""Tests for Dulwich."""

import doctest
import os
import shutil
import subprocess
import sys
import tempfile

# If Python itself provides an exception, use that
import unittest
-from unittest import SkipTest, TestCase, skipIf, expectedFailure
+from unittest import SkipTest, TestCase as _TestCase, skipIf, expectedFailure
+
+
+class TestCase(_TestCase):
+
+    def setUp(self):
+        super(TestCase, self).setUp()
+        self._old_home = os.environ.get("HOME")
+        os.environ["HOME"] = "/nonexistant"
+
+    def tearDown(self):
+        super(TestCase, self).tearDown()
+        if self._old_home:
+            os.environ["HOME"] = self._old_home
+        else:
+            del os.environ["HOME"]


class BlackboxTestCase(TestCase):
    """Blackbox testing."""

    # TODO(jelmer): Include more possible binary paths.
    bin_directories = [os.path.abspath(os.path.join(
        os.path.dirname(__file__), "..", "..", "bin")),
        '/usr/bin', '/usr/local/bin']

    def bin_path(self, name):
        """Determine the full path of a binary.

        :param name: Name of the script
        :return: Full path
        """
        for d in self.bin_directories:
            p = os.path.join(d, name)
            if os.path.isfile(p):
                return p
        else:
            raise SkipTest("Unable to find binary %s" % name)

    def run_command(self, name, args):
        """Run a Dulwich command.

        :param name: Name of the command, as it exists in bin/
        :param args: Arguments to the command
        """
        env = dict(os.environ)
        env["PYTHONPATH"] = os.pathsep.join(sys.path)

        # Since they don't have any extensions, Windows can't recognize
        # executablility of the Python files in /bin.
Even then, we'd have to # expect the user to set up file associations for .py files. # # Save us from all that headache and call python with the bin script. argv = [sys.executable, self.bin_path(name)] + args return subprocess.Popen(argv, stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.PIPE, env=env) def self_test_suite(): names = [ 'archive', 'blackbox', 'client', 'config', 'diff_tree', 'fastexport', 'file', 'grafts', 'greenthreads', 'hooks', 'index', 'lru_cache', 'objects', 'objectspec', 'object_store', 'missing_obj_finder', 'pack', 'patch', 'porcelain', 'protocol', 'reflog', 'refs', 'repository', 'server', 'walk', 'web', ] module_names = ['dulwich.tests.test_' + name for name in names] loader = unittest.TestLoader() return loader.loadTestsFromNames(module_names) def tutorial_test_suite(): tutorial = [ 'introduction', 'file-format', 'repo', 'object-store', 'remote', 'conclusion', ] tutorial_files = ["../../docs/tutorial/%s.txt" % name for name in tutorial] def setup(test): test.__old_cwd = os.getcwd() test.__dulwich_tempdir = tempfile.mkdtemp() os.chdir(test.__dulwich_tempdir) def teardown(test): os.chdir(test.__old_cwd) shutil.rmtree(test.__dulwich_tempdir) return doctest.DocFileSuite(setUp=setup, tearDown=teardown, *tutorial_files) def nocompat_test_suite(): result = unittest.TestSuite() result.addTests(self_test_suite()) from dulwich.contrib import test_suite as contrib_test_suite if sys.version_info[0] == 2: result.addTests(tutorial_test_suite()) result.addTests(contrib_test_suite()) return result def compat_test_suite(): result = unittest.TestSuite() from dulwich.tests.compat import test_suite as compat_test_suite result.addTests(compat_test_suite()) return result def test_suite(): result = unittest.TestSuite() result.addTests(self_test_suite()) if sys.version_info[0] == 2 and sys.platform != 'win32': result.addTests(tutorial_test_suite()) from dulwich.tests.compat import test_suite as compat_test_suite result.addTests(compat_test_suite()) from dulwich.contrib import test_suite as contrib_test_suite result.addTests(contrib_test_suite()) return result diff --git a/dulwich/tests/test_config.py b/dulwich/tests/test_config.py index aeef7266..81cbf7f4 100644 --- a/dulwich/tests/test_config.py +++ b/dulwich/tests/test_config.py @@ -1,316 +1,308 @@ # test_config.py -- Tests for reading and writing configuration files # Copyright (C) 2011 Jelmer Vernooij # # This program is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License # as published by the Free Software Foundation; either version 2 # or (at your option) a later version of the License. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, # MA 02110-1301, USA. 
"""Tests for reading and writing configuration files.""" from io import BytesIO import os from dulwich.config import ( ConfigDict, ConfigFile, StackedConfig, _check_section_name, _check_variable_name, _format_string, _escape_value, _parse_string, parse_submodules, ) from dulwich.tests import ( TestCase, ) -class ConfigTestCase(TestCase): - - def setUp(self): - super(ConfigTestCase, self).setUp() - self.addCleanup(os.environ.__setitem__, "HOME", os.environ["HOME"]) - os.environ["HOME"] = "/nonexistant" - - -class ConfigFileTests(ConfigTestCase): +class ConfigFileTests(TestCase): def from_file(self, text): return ConfigFile.from_file(BytesIO(text)) def test_empty(self): ConfigFile() def test_eq(self): self.assertEqual(ConfigFile(), ConfigFile()) def test_default_config(self): cf = self.from_file(b"""[core] repositoryformatversion = 0 filemode = true bare = false logallrefupdates = true """) self.assertEqual(ConfigFile({(b"core", ): { b"repositoryformatversion": b"0", b"filemode": b"true", b"bare": b"false", b"logallrefupdates": b"true"}}), cf) def test_from_file_empty(self): cf = self.from_file(b"") self.assertEqual(ConfigFile(), cf) def test_empty_line_before_section(self): cf = self.from_file(b"\n[section]\n") self.assertEqual(ConfigFile({(b"section", ): {}}), cf) def test_comment_before_section(self): cf = self.from_file(b"# foo\n[section]\n") self.assertEqual(ConfigFile({(b"section", ): {}}), cf) def test_comment_after_section(self): cf = self.from_file(b"[section] # foo\n") self.assertEqual(ConfigFile({(b"section", ): {}}), cf) def test_comment_after_variable(self): cf = self.from_file(b"[section]\nbar= foo # a comment\n") self.assertEqual(ConfigFile({(b"section", ): {b"bar": b"foo"}}), cf) def test_from_file_section(self): cf = self.from_file(b"[core]\nfoo = bar\n") self.assertEqual(b"bar", cf.get((b"core", ), b"foo")) self.assertEqual(b"bar", cf.get((b"core", b"foo"), b"foo")) def test_from_file_section_case_insensitive(self): cf = self.from_file(b"[cOre]\nfOo = bar\n") self.assertEqual(b"bar", cf.get((b"core", ), b"foo")) self.assertEqual(b"bar", cf.get((b"core", b"foo"), b"foo")) def test_from_file_with_mixed_quoted(self): cf = self.from_file(b"[core]\nfoo = \"bar\"la\n") self.assertEqual(b"barla", cf.get((b"core", ), b"foo")) def test_from_file_with_open_quoted(self): self.assertRaises(ValueError, self.from_file, b"[core]\nfoo = \"bar\n") def test_from_file_with_quotes(self): cf = self.from_file( b"[core]\n" b'foo = " bar"\n') self.assertEqual(b" bar", cf.get((b"core", ), b"foo")) def test_from_file_with_interrupted_line(self): cf = self.from_file( b"[core]\n" b'foo = bar\\\n' b' la\n') self.assertEqual(b"barla", cf.get((b"core", ), b"foo")) def test_from_file_with_boolean_setting(self): cf = self.from_file( b"[core]\n" b'foo\n') self.assertEqual(b"true", cf.get((b"core", ), b"foo")) def test_from_file_subsection(self): cf = self.from_file(b"[branch \"foo\"]\nfoo = bar\n") self.assertEqual(b"bar", cf.get((b"branch", b"foo"), b"foo")) def test_from_file_subsection_invalid(self): self.assertRaises(ValueError, self.from_file, b"[branch \"foo]\nfoo = bar\n") def test_from_file_subsection_not_quoted(self): cf = self.from_file(b"[branch.foo]\nfoo = bar\n") self.assertEqual(b"bar", cf.get((b"branch", b"foo"), b"foo")) def test_write_to_file_empty(self): c = ConfigFile() f = BytesIO() c.write_to_file(f) self.assertEqual(b"", f.getvalue()) def test_write_to_file_section(self): c = ConfigFile() c.set((b"core", ), b"foo", b"bar") f = BytesIO() c.write_to_file(f) 
self.assertEqual(b"[core]\n\tfoo = bar\n", f.getvalue()) def test_write_to_file_subsection(self): c = ConfigFile() c.set((b"branch", b"blie"), b"foo", b"bar") f = BytesIO() c.write_to_file(f) self.assertEqual(b"[branch \"blie\"]\n\tfoo = bar\n", f.getvalue()) def test_same_line(self): cf = self.from_file(b"[branch.foo] foo = bar\n") self.assertEqual(b"bar", cf.get((b"branch", b"foo"), b"foo")) #@expectedFailure def test_quoted(self): cf = self.from_file(b"""[gui] fontdiff = -family \\\"Ubuntu Mono\\\" -size 11 -weight normal -slant roman -underline 0 -overstrike 0 """) self.assertEqual(ConfigFile({(b'gui', ): { b'fontdiff': b'-family "Ubuntu Mono" -size 11 -weight normal -slant roman -underline 0 -overstrike 0', }}), cf) -class ConfigDictTests(ConfigTestCase): +class ConfigDictTests(TestCase): def test_get_set(self): cd = ConfigDict() self.assertRaises(KeyError, cd.get, b"foo", b"core") cd.set((b"core", ), b"foo", b"bla") self.assertEqual(b"bla", cd.get((b"core", ), b"foo")) cd.set((b"core", ), b"foo", b"bloe") self.assertEqual(b"bloe", cd.get((b"core", ), b"foo")) def test_get_boolean(self): cd = ConfigDict() cd.set((b"core", ), b"foo", b"true") self.assertTrue(cd.get_boolean((b"core", ), b"foo")) cd.set((b"core", ), b"foo", b"false") self.assertFalse(cd.get_boolean((b"core", ), b"foo")) cd.set((b"core", ), b"foo", b"invalid") self.assertRaises(ValueError, cd.get_boolean, (b"core", ), b"foo") def test_dict(self): cd = ConfigDict() cd.set((b"core", ), b"foo", b"bla") cd.set((b"core2", ), b"foo", b"bloe") self.assertEqual([(b"core", ), (b"core2", )], list(cd.keys())) self.assertEqual(cd[(b"core", )], {b'foo': b'bla'}) cd[b'a'] = b'b' self.assertEqual(cd[b'a'], b'b') def test_iteritems(self): cd = ConfigDict() cd.set((b"core", ), b"foo", b"bla") cd.set((b"core2", ), b"foo", b"bloe") self.assertEqual( [(b'foo', b'bla')], list(cd.iteritems((b"core", )))) def test_iteritems_nonexistant(self): cd = ConfigDict() cd.set((b"core2", ), b"foo", b"bloe") self.assertEqual([], list(cd.iteritems((b"core", )))) def test_itersections(self): cd = ConfigDict() cd.set((b"core2", ), b"foo", b"bloe") self.assertEqual([(b"core2", )], list(cd.itersections())) -class StackedConfigTests(ConfigTestCase): +class StackedConfigTests(TestCase): def test_default_backends(self): StackedConfig.default_backends() class EscapeValueTests(TestCase): def test_nothing(self): self.assertEqual(b"foo", _escape_value(b"foo")) def test_backslash(self): self.assertEqual(b"foo\\\\", _escape_value(b"foo\\")) def test_newline(self): self.assertEqual(b"foo\\n", _escape_value(b"foo\n")) class FormatStringTests(TestCase): def test_quoted(self): self.assertEqual(b'" foo"', _format_string(b" foo")) self.assertEqual(b'"\\tfoo"', _format_string(b"\tfoo")) def test_not_quoted(self): self.assertEqual(b'foo', _format_string(b"foo")) self.assertEqual(b'foo bar', _format_string(b"foo bar")) class ParseStringTests(TestCase): def test_quoted(self): self.assertEqual(b' foo', _parse_string(b'" foo"')) self.assertEqual(b'\tfoo', _parse_string(b'"\\tfoo"')) def test_not_quoted(self): self.assertEqual(b'foo', _parse_string(b"foo")) self.assertEqual(b'foo bar', _parse_string(b"foo bar")) def test_nothing(self): self.assertEqual(b"", _parse_string(b'')) def test_tab(self): self.assertEqual(b"\tbar\t", _parse_string(b"\\tbar\\t")) def test_newline(self): self.assertEqual(b"\nbar\t", _parse_string(b"\\nbar\\t\t")) def test_quote(self): self.assertEqual(b"\"foo\"", _parse_string(b"\\\"foo\\\"")) class CheckVariableNameTests(TestCase): def test_invalid(self): 
        self.assertFalse(_check_variable_name(b"foo "))
        self.assertFalse(_check_variable_name(b"bar,bar"))
        self.assertFalse(_check_variable_name(b"bar.bar"))

    def test_valid(self):
        self.assertTrue(_check_variable_name(b"FOO"))
        self.assertTrue(_check_variable_name(b"foo"))
        self.assertTrue(_check_variable_name(b"foo-bar"))


class CheckSectionNameTests(TestCase):

    def test_invalid(self):
        self.assertFalse(_check_section_name(b"foo "))
        self.assertFalse(_check_section_name(b"bar,bar"))

    def test_valid(self):
        self.assertTrue(_check_section_name(b"FOO"))
        self.assertTrue(_check_section_name(b"foo"))
        self.assertTrue(_check_section_name(b"foo-bar"))
        self.assertTrue(_check_section_name(b"bar.bar"))


-class SubmodulesTests(ConfigTestCase):
+class SubmodulesTests(TestCase):

    def testSubmodules(self):
        cf = ConfigFile.from_file(BytesIO(b"""\
[submodule "core/lib"]
	path = core/lib
	url = https://github.com/phhusson/QuasselC.git
"""))
        got = list(parse_submodules(cf))
        self.assertEqual([
            (b'core/lib', b'https://github.com/phhusson/QuasselC.git',
             b'core/lib')], got)
diff --git a/dulwich/tests/test_repository.py b/dulwich/tests/test_repository.py
index e8089815..8fa24f23 100644
--- a/dulwich/tests/test_repository.py
+++ b/dulwich/tests/test_repository.py
@@ -1,814 +1,810 @@
# -*- coding: utf-8 -*-
# test_repository.py -- tests for repository.py
# Copyright (C) 2007 James Westby
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; version 2
# of the License or (at your option) any later version of
# the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
"""Tests for the repository.""" from contextlib import closing import locale import os import stat import shutil import sys import tempfile import warnings from dulwich import errors from dulwich.object_store import ( tree_lookup_path, ) from dulwich import objects from dulwich.config import Config from dulwich.errors import NotGitRepository from dulwich.repo import ( Repo, MemoryRepo, ) from dulwich.tests import ( TestCase, skipIf, ) from dulwich.tests.utils import ( open_repo, tear_down_repo, setup_warning_catcher, ) missing_sha = b'b91fa4d900e17e99b433218e988c4eb4a3e9a097' class CreateRepositoryTests(TestCase): def assertFileContentsEqual(self, expected, repo, path): f = repo.get_named_file(path) if not f: self.assertEqual(expected, None) else: with f: self.assertEqual(expected, f.read()) def _check_repo_contents(self, repo, expect_bare): self.assertEqual(expect_bare, repo.bare) self.assertFileContentsEqual(b'Unnamed repository', repo, 'description') self.assertFileContentsEqual(b'', repo, os.path.join('info', 'exclude')) self.assertFileContentsEqual(None, repo, 'nonexistent file') barestr = b'bare = ' + str(expect_bare).lower().encode('ascii') with repo.get_named_file('config') as f: config_text = f.read() self.assertTrue(barestr in config_text, "%r" % config_text) def test_create_memory(self): repo = MemoryRepo.init_bare([], {}) self._check_repo_contents(repo, True) def test_create_disk_bare(self): tmp_dir = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, tmp_dir) repo = Repo.init_bare(tmp_dir) self.assertEqual(tmp_dir, repo._controldir) self._check_repo_contents(repo, True) def test_create_disk_non_bare(self): tmp_dir = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, tmp_dir) repo = Repo.init(tmp_dir) self.assertEqual(os.path.join(tmp_dir, '.git'), repo._controldir) self._check_repo_contents(repo, False) class RepositoryRootTests(TestCase): def mkdtemp(self): return tempfile.mkdtemp() def open_repo(self, name): temp_dir = self.mkdtemp() repo = open_repo(name, temp_dir) self.addCleanup(tear_down_repo, repo) return repo def test_simple_props(self): r = self.open_repo('a.git') self.assertEqual(r.controldir(), r.path) def test_setitem(self): r = self.open_repo('a.git') r[b"refs/tags/foo"] = b'a90fa2d900a17e99b433217e988c4eb4a2e9a097' self.assertEqual(b'a90fa2d900a17e99b433217e988c4eb4a2e9a097', r[b"refs/tags/foo"].id) def test_getitem_unicode(self): r = self.open_repo('a.git') test_keys = [ (b'refs/heads/master', True), (b'a90fa2d900a17e99b433217e988c4eb4a2e9a097', True), (b'11' * 19 + b'--', False), ] for k, contained in test_keys: self.assertEqual(k in r, contained) # Avoid deprecation warning under Py3.2+ if getattr(self, 'assertRaisesRegex', None): assertRaisesRegexp = self.assertRaisesRegex else: assertRaisesRegexp = self.assertRaisesRegexp for k, _ in test_keys: assertRaisesRegexp( TypeError, "'name' must be bytestring, not int", r.__getitem__, 12 ) def test_delitem(self): r = self.open_repo('a.git') del r[b'refs/heads/master'] self.assertRaises(KeyError, lambda: r[b'refs/heads/master']) del r[b'HEAD'] self.assertRaises(KeyError, lambda: r[b'HEAD']) self.assertRaises(ValueError, r.__delitem__, b'notrefs/foo') def test_get_refs(self): r = self.open_repo('a.git') self.assertEqual({ b'HEAD': b'a90fa2d900a17e99b433217e988c4eb4a2e9a097', b'refs/heads/master': b'a90fa2d900a17e99b433217e988c4eb4a2e9a097', b'refs/tags/mytag': b'28237f4dc30d0d462658d6b937b08a0f0b6ef55a', b'refs/tags/mytag-packed': b'b0931cadc54336e78a1d980420e3268903b57a50', }, r.get_refs()) def test_head(self): r = 
self.open_repo('a.git') self.assertEqual(r.head(), b'a90fa2d900a17e99b433217e988c4eb4a2e9a097') def test_get_object(self): r = self.open_repo('a.git') obj = r.get_object(r.head()) self.assertEqual(obj.type_name, b'commit') def test_get_object_non_existant(self): r = self.open_repo('a.git') self.assertRaises(KeyError, r.get_object, missing_sha) def test_contains_object(self): r = self.open_repo('a.git') self.assertTrue(r.head() in r) def test_contains_ref(self): r = self.open_repo('a.git') self.assertTrue(b"HEAD" in r) def test_get_no_description(self): r = self.open_repo('a.git') self.assertIs(None, r.get_description()) def test_get_description(self): r = self.open_repo('a.git') with open(os.path.join(r.path, 'description'), 'wb') as f: f.write(b"Some description") self.assertEqual(b"Some description", r.get_description()) def test_set_description(self): r = self.open_repo('a.git') description = b"Some description" r.set_description(description) self.assertEqual(description, r.get_description()) def test_contains_missing(self): r = self.open_repo('a.git') self.assertFalse(b"bar" in r) def test_get_peeled(self): # unpacked ref r = self.open_repo('a.git') tag_sha = b'28237f4dc30d0d462658d6b937b08a0f0b6ef55a' self.assertNotEqual(r[tag_sha].sha().hexdigest(), r.head()) self.assertEqual(r.get_peeled(b'refs/tags/mytag'), r.head()) # packed ref with cached peeled value packed_tag_sha = b'b0931cadc54336e78a1d980420e3268903b57a50' parent_sha = r[r.head()].parents[0] self.assertNotEqual(r[packed_tag_sha].sha().hexdigest(), parent_sha) self.assertEqual(r.get_peeled(b'refs/tags/mytag-packed'), parent_sha) # TODO: add more corner cases to test repo def test_get_peeled_not_tag(self): r = self.open_repo('a.git') self.assertEqual(r.get_peeled(b'HEAD'), r.head()) def test_get_walker(self): r = self.open_repo('a.git') # include defaults to [r.head()] self.assertEqual([e.commit.id for e in r.get_walker()], [r.head(), b'2a72d929692c41d8554c07f6301757ba18a65d91']) self.assertEqual( [e.commit.id for e in r.get_walker([b'2a72d929692c41d8554c07f6301757ba18a65d91'])], [b'2a72d929692c41d8554c07f6301757ba18a65d91']) self.assertEqual( [e.commit.id for e in r.get_walker(b'2a72d929692c41d8554c07f6301757ba18a65d91')], [b'2a72d929692c41d8554c07f6301757ba18a65d91']) def test_clone(self): r = self.open_repo('a.git') tmp_dir = self.mkdtemp() self.addCleanup(shutil.rmtree, tmp_dir) with closing(r.clone(tmp_dir, mkdir=False)) as t: self.assertEqual({ b'HEAD': b'a90fa2d900a17e99b433217e988c4eb4a2e9a097', b'refs/remotes/origin/master': b'a90fa2d900a17e99b433217e988c4eb4a2e9a097', b'refs/heads/master': b'a90fa2d900a17e99b433217e988c4eb4a2e9a097', b'refs/tags/mytag': b'28237f4dc30d0d462658d6b937b08a0f0b6ef55a', b'refs/tags/mytag-packed': b'b0931cadc54336e78a1d980420e3268903b57a50', }, t.refs.as_dict()) shas = [e.commit.id for e in r.get_walker()] self.assertEqual(shas, [t.head(), b'2a72d929692c41d8554c07f6301757ba18a65d91']) def test_clone_no_head(self): temp_dir = self.mkdtemp() self.addCleanup(shutil.rmtree, temp_dir) repo_dir = os.path.join(os.path.dirname(__file__), 'data', 'repos') dest_dir = os.path.join(temp_dir, 'a.git') shutil.copytree(os.path.join(repo_dir, 'a.git'), dest_dir, symlinks=True) r = Repo(dest_dir) del r.refs[b"refs/heads/master"] del r.refs[b"HEAD"] t = r.clone(os.path.join(temp_dir, 'b.git'), mkdir=True) self.assertEqual({ b'refs/tags/mytag': b'28237f4dc30d0d462658d6b937b08a0f0b6ef55a', b'refs/tags/mytag-packed': b'b0931cadc54336e78a1d980420e3268903b57a50', }, t.refs.as_dict()) def test_clone_empty(self): 
"""Test clone() doesn't crash if HEAD points to a non-existing ref. This simulates cloning server-side bare repository either when it is still empty or if user renames master branch and pushes private repo to the server. Non-bare repo HEAD always points to an existing ref. """ r = self.open_repo('empty.git') tmp_dir = self.mkdtemp() self.addCleanup(shutil.rmtree, tmp_dir) r.clone(tmp_dir, mkdir=False, bare=True) def test_merge_history(self): r = self.open_repo('simple_merge.git') shas = [e.commit.id for e in r.get_walker()] self.assertEqual(shas, [b'5dac377bdded4c9aeb8dff595f0faeebcc8498cc', b'ab64bbdcc51b170d21588e5c5d391ee5c0c96dfd', b'4cffe90e0a41ad3f5190079d7c8f036bde29cbe6', b'60dacdc733de308bb77bb76ce0fb0f9b44c9769e', b'0d89f20333fbb1d2f3a94da77f4981373d8f4310']) def test_out_of_order_merge(self): """Test that revision history is ordered by date, not parent order.""" r = self.open_repo('ooo_merge.git') shas = [e.commit.id for e in r.get_walker()] self.assertEqual(shas, [b'7601d7f6231db6a57f7bbb79ee52e4d462fd44d1', b'f507291b64138b875c28e03469025b1ea20bc614', b'fb5b0425c7ce46959bec94d54b9a157645e114f5', b'f9e39b120c68182a4ba35349f832d0e4e61f485c']) def test_get_tags_empty(self): r = self.open_repo('ooo_merge.git') self.assertEqual({}, r.refs.as_dict(b'refs/tags')) def test_get_config(self): r = self.open_repo('ooo_merge.git') self.assertIsInstance(r.get_config(), Config) def test_get_config_stack(self): - self.addCleanup(os.environ.__setitem__, "HOME", os.environ["HOME"]) - os.environ["HOME"] = "/nonexistant" r = self.open_repo('ooo_merge.git') self.assertIsInstance(r.get_config_stack(), Config) @skipIf(not getattr(os, 'symlink', None), 'Requires symlink support') def test_submodule(self): temp_dir = self.mkdtemp() self.addCleanup(shutil.rmtree, temp_dir) repo_dir = os.path.join(os.path.dirname(__file__), 'data', 'repos') shutil.copytree(os.path.join(repo_dir, 'a.git'), os.path.join(temp_dir, 'a.git'), symlinks=True) rel = os.path.relpath(os.path.join(repo_dir, 'submodule'), temp_dir) os.symlink(os.path.join(rel, 'dotgit'), os.path.join(temp_dir, '.git')) with closing(Repo(temp_dir)) as r: self.assertEqual(r.head(), b'a90fa2d900a17e99b433217e988c4eb4a2e9a097') def test_common_revisions(self): """ This test demonstrates that ``find_common_revisions()`` actually returns common heads, not revisions; dulwich already uses ``find_common_revisions()`` in such a manner (see ``Repo.fetch_objects()``). """ expected_shas = set([b'60dacdc733de308bb77bb76ce0fb0f9b44c9769e']) # Source for objects. r_base = self.open_repo('simple_merge.git') # Re-create each-side of the merge in simple_merge.git. # # Since the trees and blobs are missing, the repository created is # corrupted, but we're only checking for commits for the purpose of this # test, so it's immaterial. r1_dir = self.mkdtemp() self.addCleanup(shutil.rmtree, r1_dir) r1_commits = [b'ab64bbdcc51b170d21588e5c5d391ee5c0c96dfd', # HEAD b'60dacdc733de308bb77bb76ce0fb0f9b44c9769e', b'0d89f20333fbb1d2f3a94da77f4981373d8f4310'] r2_dir = self.mkdtemp() self.addCleanup(shutil.rmtree, r2_dir) r2_commits = [b'4cffe90e0a41ad3f5190079d7c8f036bde29cbe6', # HEAD b'60dacdc733de308bb77bb76ce0fb0f9b44c9769e', b'0d89f20333fbb1d2f3a94da77f4981373d8f4310'] r1 = Repo.init_bare(r1_dir) for c in r1_commits: r1.object_store.add_object(r_base.get_object(c)) r1.refs[b'HEAD'] = r1_commits[0] r2 = Repo.init_bare(r2_dir) for c in r2_commits: r2.object_store.add_object(r_base.get_object(c)) r2.refs[b'HEAD'] = r2_commits[0] # Finally, the 'real' testing! 
shas = r2.object_store.find_common_revisions(r1.get_graph_walker()) self.assertEqual(set(shas), expected_shas) shas = r1.object_store.find_common_revisions(r2.get_graph_walker()) self.assertEqual(set(shas), expected_shas) def test_shell_hook_pre_commit(self): if os.name != 'posix': self.skipTest('shell hook tests requires POSIX shell') pre_commit_fail = """#!/bin/sh exit 1 """ pre_commit_success = """#!/bin/sh exit 0 """ repo_dir = os.path.join(self.mkdtemp()) r = Repo.init(repo_dir) self.addCleanup(shutil.rmtree, repo_dir) pre_commit = os.path.join(r.controldir(), 'hooks', 'pre-commit') with open(pre_commit, 'w') as f: f.write(pre_commit_fail) os.chmod(pre_commit, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC) self.assertRaises(errors.CommitError, r.do_commit, 'failed commit', committer='Test Committer ', author='Test Author ', commit_timestamp=12345, commit_timezone=0, author_timestamp=12345, author_timezone=0) with open(pre_commit, 'w') as f: f.write(pre_commit_success) os.chmod(pre_commit, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC) commit_sha = r.do_commit( b'empty commit', committer=b'Test Committer ', author=b'Test Author ', commit_timestamp=12395, commit_timezone=0, author_timestamp=12395, author_timezone=0) self.assertEqual([], r[commit_sha].parents) def test_shell_hook_commit_msg(self): if os.name != 'posix': self.skipTest('shell hook tests requires POSIX shell') commit_msg_fail = """#!/bin/sh exit 1 """ commit_msg_success = """#!/bin/sh exit 0 """ repo_dir = self.mkdtemp() r = Repo.init(repo_dir) self.addCleanup(shutil.rmtree, repo_dir) commit_msg = os.path.join(r.controldir(), 'hooks', 'commit-msg') with open(commit_msg, 'w') as f: f.write(commit_msg_fail) os.chmod(commit_msg, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC) self.assertRaises(errors.CommitError, r.do_commit, b'failed commit', committer=b'Test Committer ', author=b'Test Author ', commit_timestamp=12345, commit_timezone=0, author_timestamp=12345, author_timezone=0) with open(commit_msg, 'w') as f: f.write(commit_msg_success) os.chmod(commit_msg, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC) commit_sha = r.do_commit( b'empty commit', committer=b'Test Committer ', author=b'Test Author ', commit_timestamp=12395, commit_timezone=0, author_timestamp=12395, author_timezone=0) self.assertEqual([], r[commit_sha].parents) def test_shell_hook_post_commit(self): if os.name != 'posix': self.skipTest('shell hook tests requires POSIX shell') repo_dir = self.mkdtemp() r = Repo.init(repo_dir) self.addCleanup(shutil.rmtree, repo_dir) (fd, path) = tempfile.mkstemp(dir=repo_dir) os.close(fd) post_commit_msg = """#!/bin/sh rm """ + path + """ """ root_sha = r.do_commit( b'empty commit', committer=b'Test Committer ', author=b'Test Author ', commit_timestamp=12345, commit_timezone=0, author_timestamp=12345, author_timezone=0) self.assertEqual([], r[root_sha].parents) post_commit = os.path.join(r.controldir(), 'hooks', 'post-commit') with open(post_commit, 'wb') as f: f.write(post_commit_msg.encode(locale.getpreferredencoding())) os.chmod(post_commit, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC) commit_sha = r.do_commit( b'empty commit', committer=b'Test Committer ', author=b'Test Author ', commit_timestamp=12345, commit_timezone=0, author_timestamp=12345, author_timezone=0) self.assertEqual([root_sha], r[commit_sha].parents) self.assertFalse(os.path.exists(path)) post_commit_msg_fail = """#!/bin/sh exit 1 """ with open(post_commit, 'w') as f: f.write(post_commit_msg_fail) os.chmod(post_commit, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC) 
warnings.simplefilter("always", UserWarning) self.addCleanup(warnings.resetwarnings) warnings_list, restore_warnings = setup_warning_catcher() self.addCleanup(restore_warnings) commit_sha2 = r.do_commit( b'empty commit', committer=b'Test Committer ', author=b'Test Author ', commit_timestamp=12345, commit_timezone=0, author_timestamp=12345, author_timezone=0) self.assertEqual(len(warnings_list), 1, warnings_list) self.assertIsInstance(warnings_list[-1], UserWarning) self.assertTrue("post-commit hook failed: " in str(warnings_list[-1])) self.assertEqual([commit_sha], r[commit_sha2].parents) def test_as_dict(self): def check(repo): self.assertEqual(repo.refs.subkeys(b'refs/tags'), repo.refs.subkeys(b'refs/tags/')) self.assertEqual(repo.refs.as_dict(b'refs/tags'), repo.refs.as_dict(b'refs/tags/')) self.assertEqual(repo.refs.as_dict(b'refs/heads'), repo.refs.as_dict(b'refs/heads/')) bare = self.open_repo('a.git') tmp_dir = self.mkdtemp() self.addCleanup(shutil.rmtree, tmp_dir) with closing(bare.clone(tmp_dir, mkdir=False)) as nonbare: check(nonbare) check(bare) class BuildRepoRootTests(TestCase): """Tests that build on-disk repos from scratch. Repos live in a temp dir and are torn down after each test. They start with a single commit in master having single file named 'a'. """ def get_repo_dir(self): return os.path.join(tempfile.mkdtemp(), 'test') def setUp(self): super(BuildRepoRootTests, self).setUp() self._repo_dir = self.get_repo_dir() os.makedirs(self._repo_dir) r = self._repo = Repo.init(self._repo_dir) self.addCleanup(tear_down_repo, r) self.assertFalse(r.bare) self.assertEqual(b'ref: refs/heads/master', r.refs.read_ref(b'HEAD')) self.assertRaises(KeyError, lambda: r.refs[b'refs/heads/master']) with open(os.path.join(r.path, 'a'), 'wb') as f: f.write(b'file contents') r.stage(['a']) commit_sha = r.do_commit(b'msg', committer=b'Test Committer ', author=b'Test Author ', commit_timestamp=12345, commit_timezone=0, author_timestamp=12345, author_timezone=0) self.assertEqual([], r[commit_sha].parents) self._root_commit = commit_sha def test_build_repo(self): r = self._repo self.assertEqual(b'ref: refs/heads/master', r.refs.read_ref(b'HEAD')) self.assertEqual(self._root_commit, r.refs[b'refs/heads/master']) expected_blob = objects.Blob.from_string(b'file contents') self.assertEqual(expected_blob.data, r[expected_blob.id].data) actual_commit = r[self._root_commit] self.assertEqual(b'msg', actual_commit.message) def test_commit_modified(self): r = self._repo with open(os.path.join(r.path, 'a'), 'wb') as f: f.write(b'new contents') r.stage(['a']) commit_sha = r.do_commit(b'modified a', committer=b'Test Committer ', author=b'Test Author ', commit_timestamp=12395, commit_timezone=0, author_timestamp=12395, author_timezone=0) self.assertEqual([self._root_commit], r[commit_sha].parents) a_mode, a_id = tree_lookup_path(r.get_object, r[commit_sha].tree, b'a') self.assertEqual(stat.S_IFREG | 0o644, a_mode) self.assertEqual(b'new contents', r[a_id].data) @skipIf(not getattr(os, 'symlink', None), 'Requires symlink support') def test_commit_symlink(self): r = self._repo os.symlink('a', os.path.join(r.path, 'b')) r.stage(['a', 'b']) commit_sha = r.do_commit(b'Symlink b', committer=b'Test Committer ', author=b'Test Author ', commit_timestamp=12395, commit_timezone=0, author_timestamp=12395, author_timezone=0) self.assertEqual([self._root_commit], r[commit_sha].parents) b_mode, b_id = tree_lookup_path(r.get_object, r[commit_sha].tree, b'b') self.assertTrue(stat.S_ISLNK(b_mode)) self.assertEqual(b'a', 
r[b_id].data) def test_commit_deleted(self): r = self._repo os.remove(os.path.join(r.path, 'a')) r.stage(['a']) commit_sha = r.do_commit(b'deleted a', committer=b'Test Committer ', author=b'Test Author ', commit_timestamp=12395, commit_timezone=0, author_timestamp=12395, author_timezone=0) self.assertEqual([self._root_commit], r[commit_sha].parents) self.assertEqual([], list(r.open_index())) tree = r[r[commit_sha].tree] self.assertEqual([], list(tree.iteritems())) def test_commit_follows(self): r = self._repo r.refs.set_symbolic_ref(b'HEAD', b'refs/heads/bla') commit_sha = r.do_commit(b'commit with strange character', committer=b'Test Committer ', author=b'Test Author ', commit_timestamp=12395, commit_timezone=0, author_timestamp=12395, author_timezone=0, ref=b'HEAD') self.assertEqual(commit_sha, r[b'refs/heads/bla'].id) def test_commit_encoding(self): r = self._repo commit_sha = r.do_commit(b'commit with strange character \xee', committer=b'Test Committer ', author=b'Test Author ', commit_timestamp=12395, commit_timezone=0, author_timestamp=12395, author_timezone=0, encoding=b"iso8859-1") self.assertEqual(b"iso8859-1", r[commit_sha].encoding) def test_commit_config_identity(self): - self.addCleanup(os.environ.__setitem__, "HOME", os.environ["HOME"]) - os.environ["HOME"] = "/nonexistant" # commit falls back to the users' identity if it wasn't specified r = self._repo c = r.get_config() c.set((b"user", ), b"name", b"Jelmer") c.set((b"user", ), b"email", b"jelmer@apache.org") c.write_to_path() commit_sha = r.do_commit(b'message') self.assertEqual( b"Jelmer ", r[commit_sha].author) self.assertEqual( b"Jelmer ", r[commit_sha].committer) def test_commit_config_identity_in_memoryrepo(self): # commit falls back to the users' identity if it wasn't specified r = MemoryRepo.init_bare([], {}) c = r.get_config() c.set((b"user", ), b"name", b"Jelmer") c.set((b"user", ), b"email", b"jelmer@apache.org") commit_sha = r.do_commit(b'message', tree=objects.Tree().id) self.assertEqual( b"Jelmer ", r[commit_sha].author) self.assertEqual( b"Jelmer ", r[commit_sha].committer) def test_commit_fail_ref(self): r = self._repo def set_if_equals(name, old_ref, new_ref): return False r.refs.set_if_equals = set_if_equals def add_if_new(name, new_ref): self.fail('Unexpected call to add_if_new') r.refs.add_if_new = add_if_new old_shas = set(r.object_store) self.assertRaises(errors.CommitError, r.do_commit, b'failed commit', committer=b'Test Committer ', author=b'Test Author ', commit_timestamp=12345, commit_timezone=0, author_timestamp=12345, author_timezone=0) new_shas = set(r.object_store) - old_shas self.assertEqual(1, len(new_shas)) # Check that the new commit (now garbage) was added. 
new_commit = r[new_shas.pop()] self.assertEqual(r[self._root_commit].tree, new_commit.tree) self.assertEqual(b'failed commit', new_commit.message) def test_commit_branch(self): r = self._repo commit_sha = r.do_commit(b'commit to branch', committer=b'Test Committer ', author=b'Test Author ', commit_timestamp=12395, commit_timezone=0, author_timestamp=12395, author_timezone=0, ref=b"refs/heads/new_branch") self.assertEqual(self._root_commit, r[b"HEAD"].id) self.assertEqual(commit_sha, r[b"refs/heads/new_branch"].id) self.assertEqual([], r[commit_sha].parents) self.assertTrue(b"refs/heads/new_branch" in r) new_branch_head = commit_sha commit_sha = r.do_commit(b'commit to branch 2', committer=b'Test Committer ', author=b'Test Author ', commit_timestamp=12395, commit_timezone=0, author_timestamp=12395, author_timezone=0, ref=b"refs/heads/new_branch") self.assertEqual(self._root_commit, r[b"HEAD"].id) self.assertEqual(commit_sha, r[b"refs/heads/new_branch"].id) self.assertEqual([new_branch_head], r[commit_sha].parents) def test_commit_merge_heads(self): r = self._repo merge_1 = r.do_commit(b'commit to branch 2', committer=b'Test Committer ', author=b'Test Author ', commit_timestamp=12395, commit_timezone=0, author_timestamp=12395, author_timezone=0, ref=b"refs/heads/new_branch") commit_sha = r.do_commit(b'commit with merge', committer=b'Test Committer ', author=b'Test Author ', commit_timestamp=12395, commit_timezone=0, author_timestamp=12395, author_timezone=0, merge_heads=[merge_1]) self.assertEqual( [self._root_commit, merge_1], r[commit_sha].parents) def test_commit_dangling_commit(self): r = self._repo old_shas = set(r.object_store) old_refs = r.get_refs() commit_sha = r.do_commit(b'commit with no ref', committer=b'Test Committer ', author=b'Test Author ', commit_timestamp=12395, commit_timezone=0, author_timestamp=12395, author_timezone=0, ref=None) new_shas = set(r.object_store) - old_shas # New sha is added, but no new refs self.assertEqual(1, len(new_shas)) new_commit = r[new_shas.pop()] self.assertEqual(r[self._root_commit].tree, new_commit.tree) self.assertEqual([], r[commit_sha].parents) self.assertEqual(old_refs, r.get_refs()) def test_commit_dangling_commit_with_parents(self): r = self._repo old_shas = set(r.object_store) old_refs = r.get_refs() commit_sha = r.do_commit(b'commit with no ref', committer=b'Test Committer ', author=b'Test Author ', commit_timestamp=12395, commit_timezone=0, author_timestamp=12395, author_timezone=0, ref=None, merge_heads=[self._root_commit]) new_shas = set(r.object_store) - old_shas # New sha is added, but no new refs self.assertEqual(1, len(new_shas)) new_commit = r[new_shas.pop()] self.assertEqual(r[self._root_commit].tree, new_commit.tree) self.assertEqual([self._root_commit], r[commit_sha].parents) self.assertEqual(old_refs, r.get_refs()) def test_stage_deleted(self): r = self._repo os.remove(os.path.join(r.path, 'a')) r.stage(['a']) r.stage(['a']) # double-stage a deleted path def test_commit_no_encode_decode(self): r = self._repo repo_path_bytes = r.path.encode(sys.getfilesystemencoding()) encodings = ('utf8', 'latin1') names = [u'À'.encode(encoding) for encoding in encodings] for name, encoding in zip(names, encodings): full_path = os.path.join(repo_path_bytes, name) with open(full_path, 'wb') as f: f.write(encoding.encode('ascii')) # These files are break tear_down_repo, so cleanup these files # ourselves. 
self.addCleanup(os.remove, full_path) r.stage(names) commit_sha = r.do_commit(b'Files with different encodings', committer=b'Test Committer ', author=b'Test Author ', commit_timestamp=12395, commit_timezone=0, author_timestamp=12395, author_timezone=0, ref=None, merge_heads=[self._root_commit]) for name, encoding in zip(names, encodings): mode, id = tree_lookup_path(r.get_object, r[commit_sha].tree, name) self.assertEqual(stat.S_IFREG | 0o644, mode) self.assertEqual(encoding.encode('ascii'), r[id].data) def test_discover_intended(self): path = os.path.join(self._repo_dir, 'b/c') r = Repo.discover(path) self.assertEqual(r.head(), self._repo.head()) def test_discover_isrepo(self): r = Repo.discover(self._repo_dir) self.assertEqual(r.head(), self._repo.head()) def test_discover_notrepo(self): with self.assertRaises(NotGitRepository): Repo.discover('/')
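
The patch above centralizes the HOME override that several test cases previously applied on their own (ConfigTestCase in test_config.py, and the removed addCleanup/os.environ lines in test_get_config_stack and test_commit_config_identity in test_repository.py): the new dulwich.tests.TestCase points HOME at a directory that does not exist for every test and restores it afterwards, so lookups such as StackedConfig.default_backends() can never pick up the developer's real ~/.gitconfig. Below is a minimal, self-contained sketch of the same save/override/restore pattern, independent of dulwich; IsolatedHomeTestCase and ExampleTest are illustrative names only, not part of the patch.

import os
import unittest


class IsolatedHomeTestCase(unittest.TestCase):
    """Hide the real $HOME from every test and restore it afterwards.

    Mirrors the pattern added to dulwich.tests.TestCase in the diff:
    remember the old value in setUp, point HOME at a non-existent
    directory, and put everything back in tearDown so other tests and
    the developer's environment are unaffected.
    """

    def setUp(self):
        super(IsolatedHomeTestCase, self).setUp()
        self._old_home = os.environ.get("HOME")
        os.environ["HOME"] = "/nonexistant"  # spelling kept from the patch

    def tearDown(self):
        super(IsolatedHomeTestCase, self).tearDown()
        if self._old_home is not None:
            os.environ["HOME"] = self._old_home
        else:
            os.environ.pop("HOME", None)


class ExampleTest(IsolatedHomeTestCase):
    # Illustrative test: user-level config under $HOME is now unreachable.
    def test_home_is_redirected(self):
        self.assertEqual("/nonexistant", os.environ["HOME"])


if __name__ == "__main__":
    unittest.main()

The per-test code removed by the diff achieved the same effect with self.addCleanup(os.environ.__setitem__, "HOME", os.environ["HOME"]); moving the override into a shared base class simply guarantees the isolation for every test without each case having to remember it.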