diff --git a/dulwich/cli.py b/dulwich/cli.py index 946f7820..e555a670 100755 --- a/dulwich/cli.py +++ b/dulwich/cli.py @@ -1,739 +1,751 @@ #!/usr/bin/python3 -u # # dulwich - Simple command-line interface to Dulwich # Copyright (C) 2008-2011 Jelmer Vernooij # vim: expandtab # # Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU # General Public License as published by the Free Software Foundation; version 2.0 # or (at your option) any later version. You can redistribute it and/or # modify it under the terms of either of these two licenses. # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # You should have received a copy of the licenses; if not, see # <http://www.gnu.org/licenses/> for a copy of the GNU General Public License # and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache # License, Version 2.0. # """Simple command-line interface to Dulwich. This is a very simple command-line wrapper for Dulwich. It is by no means intended to be a full-blown Git command-line interface but just a way to test Dulwich. """ import os import sys from getopt import getopt import optparse import signal from typing import Dict, Type from dulwich import porcelain from dulwich.client import get_transport_and_path from dulwich.errors import ApplyDeltaError from dulwich.index import Index from dulwich.pack import Pack, sha_to_hex from dulwich.patch import write_tree_diff from dulwich.repo import Repo def signal_int(signal, frame): sys.exit(1) def signal_quit(signal, frame): import pdb pdb.set_trace() class Command(object): """A Dulwich subcommand.""" def run(self, args): """Run the command.""" raise NotImplementedError(self.run) class cmd_archive(Command): def run(self, args): parser = optparse.OptionParser() parser.add_option( - "--remote", type=str, help="Retrieve archive from specified remote repo" + "--remote", + type=str, + help="Retrieve archive from specified remote repo", ) options, args = parser.parse_args(args) committish = args.pop(0) if options.remote: client, path = get_transport_and_path(options.remote) client.archive( - path, committish, sys.stdout.write, write_error=sys.stderr.write + path, + committish, + sys.stdout.write, + write_error=sys.stderr.write, ) else: porcelain.archive( ".", committish, outstream=sys.stdout, errstream=sys.stderr ) class cmd_add(Command): def run(self, args): opts, args = getopt(args, "", []) porcelain.add(".", paths=args) class cmd_rm(Command): def run(self, args): opts, args = getopt(args, "", []) porcelain.rm(".", paths=args) class cmd_fetch_pack(Command): def run(self, args): opts, args = getopt(args, "", ["all"]) opts = dict(opts) client, path = get_transport_and_path(args.pop(0)) r = Repo(".") if "--all" in opts: determine_wants = r.object_store.determine_wants_all else: def determine_wants(x): return [y for y in args if y not in r.object_store] client.fetch(path, r, determine_wants) class cmd_fetch(Command): def run(self, args): opts, args = getopt(args, "", []) opts = dict(opts) client, path = get_transport_and_path(args.pop(0)) r = Repo(".") refs = client.fetch(path, r, progress=sys.stdout.write) print("Remote refs:") for item in refs.items(): print("%s -> %s" % item) class cmd_fsck(Command): def run(self, args): opts, args = getopt(args, "", []) opts = dict(opts) for (obj, msg) in porcelain.fsck("."): print("%s: %s" % (obj,
msg)) class cmd_log(Command): def run(self, args): parser = optparse.OptionParser() parser.add_option( "--reverse", dest="reverse", action="store_true", help="Reverse order in which entries are printed", ) parser.add_option( "--name-status", dest="name_status", action="store_true", help="Print name/status for each changed file", ) options, args = parser.parse_args(args) porcelain.log( ".", paths=args, reverse=options.reverse, name_status=options.name_status, outstream=sys.stdout, ) class cmd_diff(Command): def run(self, args): opts, args = getopt(args, "", []) if args == []: print("Usage: dulwich diff COMMITID") sys.exit(1) r = Repo(".") commit_id = args[0] commit = r[commit_id] parent_commit = r[commit.parents[0]] write_tree_diff(sys.stdout, r.object_store, parent_commit.tree, commit.tree) class cmd_dump_pack(Command): def run(self, args): opts, args = getopt(args, "", []) if args == []: print("Usage: dulwich dump-pack FILENAME") sys.exit(1) basename, _ = os.path.splitext(args[0]) x = Pack(basename) print("Object names checksum: %s" % x.name()) print("Checksum: %s" % sha_to_hex(x.get_stored_checksum())) if not x.check(): print("CHECKSUM DOES NOT MATCH") print("Length: %d" % len(x)) for name in x: try: print("\t%s" % x[name]) except KeyError as k: print("\t%s: Unable to resolve base %s" % (name, k)) except ApplyDeltaError as e: print("\t%s: Unable to apply delta: %r" % (name, e)) class cmd_dump_index(Command): def run(self, args): opts, args = getopt(args, "", []) if args == []: print("Usage: dulwich dump-index FILENAME") sys.exit(1) filename = args[0] idx = Index(filename) for o in idx: print(o, idx[o]) class cmd_init(Command): def run(self, args): opts, args = getopt(args, "", ["bare"]) opts = dict(opts) if args == []: path = os.getcwd() else: path = args[0] porcelain.init(path, bare=("--bare" in opts)) class cmd_clone(Command): def run(self, args): parser = optparse.OptionParser() parser.add_option( "--bare", dest="bare", help="Whether to create a bare repository.", action="store_true", ) parser.add_option( "--depth", dest="depth", type=int, help="Depth at which to fetch" ) options, args = parser.parse_args(args) if args == []: print("usage: dulwich clone host:path [PATH]") sys.exit(1) source = args.pop(0) if len(args) > 0: target = args.pop(0) else: target = None porcelain.clone(source, target, bare=options.bare, depth=options.depth) class cmd_commit(Command): def run(self, args): opts, args = getopt(args, "", ["message="]) opts = dict(opts) porcelain.commit(".", message=opts["--message"]) class cmd_commit_tree(Command): def run(self, args): opts, args = getopt(args, "", ["message="]) if args == []: print("usage: dulwich commit-tree tree") sys.exit(1) opts = dict(opts) porcelain.commit_tree(".", tree=args[0], message=opts["--message"]) class cmd_update_server_info(Command): def run(self, args): porcelain.update_server_info(".") class cmd_symbolic_ref(Command): def run(self, args): opts, args = getopt(args, "", ["ref-name", "force"]) if not args: print("Usage: dulwich symbolic-ref REF_NAME [--force]") sys.exit(1) ref_name = args.pop(0) porcelain.symbolic_ref(".", ref_name=ref_name, force="--force" in args) class cmd_show(Command): def run(self, args): opts, args = getopt(args, "", []) porcelain.show(".", args) class cmd_diff_tree(Command): def run(self, args): opts, args = getopt(args, "", []) if len(args) < 2: print("Usage: dulwich diff-tree OLD-TREE NEW-TREE") sys.exit(1) porcelain.diff_tree(".", args[0], args[1]) class cmd_rev_list(Command): def run(self, args): opts, args =
getopt(args, "", []) if len(args) < 1: print("Usage: dulwich rev-list COMMITID...") sys.exit(1) porcelain.rev_list(".", args) class cmd_tag(Command): def run(self, args): parser = optparse.OptionParser() parser.add_option( - "-a", "--annotated", help="Create an annotated tag.", action="store_true" + "-a", + "--annotated", + help="Create an annotated tag.", + action="store_true", ) parser.add_option( "-s", "--sign", help="Sign the annotated tag.", action="store_true" ) options, args = parser.parse_args(args) porcelain.tag_create( ".", args[0], annotated=options.annotated, sign=options.sign ) class cmd_repack(Command): def run(self, args): opts, args = getopt(args, "", []) opts = dict(opts) porcelain.repack(".") class cmd_reset(Command): def run(self, args): opts, args = getopt(args, "", ["hard", "soft", "mixed"]) opts = dict(opts) mode = "" if "--hard" in opts: mode = "hard" elif "--soft" in opts: mode = "soft" elif "--mixed" in opts: mode = "mixed" porcelain.reset(".", mode=mode, *args) class cmd_daemon(Command): def run(self, args): from dulwich import log_utils from dulwich.protocol import TCP_GIT_PORT parser = optparse.OptionParser() parser.add_option( "-l", "--listen_address", dest="listen_address", default="localhost", help="Binding IP address.", ) parser.add_option( "-p", "--port", dest="port", type=int, default=TCP_GIT_PORT, help="Binding TCP port.", ) options, args = parser.parse_args(args) log_utils.default_logging_config() if len(args) >= 1: gitdir = args[0] else: gitdir = "." from dulwich import porcelain porcelain.daemon(gitdir, address=options.listen_address, port=options.port) class cmd_web_daemon(Command): def run(self, args): from dulwich import log_utils parser = optparse.OptionParser() parser.add_option( "-l", "--listen_address", dest="listen_address", default="", help="Binding IP address.", ) parser.add_option( "-p", "--port", dest="port", type=int, default=8000, help="Binding TCP port.", ) options, args = parser.parse_args(args) log_utils.default_logging_config() if len(args) >= 1: gitdir = args[0] else: gitdir = "." from dulwich import porcelain porcelain.web_daemon(gitdir, address=options.listen_address, port=options.port) class cmd_write_tree(Command): def run(self, args): parser = optparse.OptionParser() options, args = parser.parse_args(args) sys.stdout.write("%s\n" % porcelain.write_tree(".")) class cmd_receive_pack(Command): def run(self, args): parser = optparse.OptionParser() options, args = parser.parse_args(args) if len(args) >= 1: gitdir = args[0] else: gitdir = "." porcelain.receive_pack(gitdir) class cmd_upload_pack(Command): def run(self, args): parser = optparse.OptionParser() options, args = parser.parse_args(args) if len(args) >= 1: gitdir = args[0] else: gitdir = "." porcelain.upload_pack(gitdir) class cmd_status(Command): def run(self, args): parser = optparse.OptionParser() options, args = parser.parse_args(args) if len(args) >= 1: gitdir = args[0] else: gitdir = "." 
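        # The rendering below assumes porcelain.status() returns a
        # GitStatus-style result: .staged is a dict mapping a change kind
        # ("add", "delete", "modify") to lists of path bytestrings, while
        # .unstaged and .untracked are plain lists of names.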
status = porcelain.status(gitdir) if any(names for (kind, names) in status.staged.items()): sys.stdout.write("Changes to be committed:\n\n") for kind, names in status.staged.items(): for name in names: sys.stdout.write( "\t%s: %s\n" % (kind, name.decode(sys.getfilesystemencoding())) ) sys.stdout.write("\n") if status.unstaged: sys.stdout.write("Changes not staged for commit:\n\n") for name in status.unstaged: sys.stdout.write("\t%s\n" % name.decode(sys.getfilesystemencoding())) sys.stdout.write("\n") if status.untracked: sys.stdout.write("Untracked files:\n\n") for name in status.untracked: sys.stdout.write("\t%s\n" % name) sys.stdout.write("\n") class cmd_ls_remote(Command): def run(self, args): opts, args = getopt(args, "", []) if len(args) < 1: print("Usage: dulwich ls-remote URL") sys.exit(1) refs = porcelain.ls_remote(args[0]) for ref in sorted(refs): sys.stdout.write("%s\t%s\n" % (ref, refs[ref])) class cmd_ls_tree(Command): def run(self, args): parser = optparse.OptionParser() parser.add_option( "-r", "--recursive", action="store_true", help="Recursively list tree contents.", ) parser.add_option("--name-only", action="store_true", help="Only display name.") options, args = parser.parse_args(args) try: treeish = args.pop(0) except IndexError: treeish = None porcelain.ls_tree( ".", treeish, outstream=sys.stdout, recursive=options.recursive, name_only=options.name_only, ) class cmd_pack_objects(Command): def run(self, args): opts, args = getopt(args, "", ["stdout"]) opts = dict(opts) if len(args) < 1 and "--stdout" not in opts: print("Usage: dulwich pack-objects basename") sys.exit(1) object_ids = [line.strip() for line in sys.stdin.readlines()] if "--stdout" in opts: packf = getattr(sys.stdout, "buffer", sys.stdout) idxf = None close = [] else: basename = args[0] packf = open(basename + ".pack", "wb") idxf = open(basename + ".idx", "wb") close = [packf, idxf] porcelain.pack_objects(".", object_ids, packf, idxf) for f in close: f.close() class cmd_pull(Command): def run(self, args): parser = optparse.OptionParser() options, args = parser.parse_args(args) try: from_location = args[0] except IndexError: from_location = None porcelain.pull(".", from_location) class cmd_push(Command): def run(self, args): parser = optparse.OptionParser() options, args = parser.parse_args(args) if len(args) < 2: print("Usage: dulwich push TO-LOCATION REFSPEC..") sys.exit(1) to_location = args[0] refspecs = args[1:] porcelain.push(".", to_location, refspecs) class cmd_remote_add(Command): def run(self, args): parser = optparse.OptionParser() options, args = parser.parse_args(args) porcelain.remote_add(".", args[0], args[1]) class SuperCommand(Command): subcommands = {} # type: Dict[str, Type[Command]] def run(self, args): if not args: print("Supported subcommands: %s" % ", ".join(self.subcommands.keys())) return False cmd = args[0] try: cmd_kls = self.subcommands[cmd] except KeyError: print("No such subcommand: %s" % args[0]) return False return cmd_kls().run(args[1:]) class cmd_remote(SuperCommand): subcommands = { "add": cmd_remote_add, } class cmd_check_ignore(Command): def run(self, args): parser = optparse.OptionParser() options, args = parser.parse_args(args) ret = 1 for path in porcelain.check_ignore(".", args): print(path) ret = 0 return ret class cmd_check_mailmap(Command): def run(self, args): parser = optparse.OptionParser() options, args = parser.parse_args(args) for arg in args: canonical_identity = porcelain.check_mailmap(".", arg) print(canonical_identity) class cmd_stash_list(Command): def
run(self, args): parser = optparse.OptionParser() options, args = parser.parse_args(args) for i, entry in porcelain.stash_list("."): print("stash@{%d}: %s" % (i, entry.message.rstrip("\n"))) class cmd_stash_push(Command): def run(self, args): parser = optparse.OptionParser() options, args = parser.parse_args(args) porcelain.stash_push(".") print("Saved working directory and index state") class cmd_stash_pop(Command): def run(self, args): parser = optparse.OptionParser() options, args = parser.parse_args(args) porcelain.stash_pop(".") print("Restored working directory and index state") class cmd_stash(SuperCommand): subcommands = { "list": cmd_stash_list, "pop": cmd_stash_pop, "push": cmd_stash_push, } class cmd_ls_files(Command): def run(self, args): parser = optparse.OptionParser() options, args = parser.parse_args(args) for name in porcelain.ls_files("."): print(name) class cmd_describe(Command): def run(self, args): parser = optparse.OptionParser() options, args = parser.parse_args(args) print(porcelain.describe(".")) class cmd_help(Command): def run(self, args): parser = optparse.OptionParser() parser.add_option( - "-a", "--all", dest="all", action="store_true", help="List all commands." + "-a", + "--all", + dest="all", + action="store_true", + help="List all commands.", ) options, args = parser.parse_args(args) if options.all: print("Available commands:") for cmd in sorted(commands): print(" %s" % cmd) else: print( """\ The dulwich command line tool is currently a very basic frontend for the Dulwich python module. For full functionality, please see the API reference. For a list of supported commands, see 'dulwich help -a'. """ ) commands = { "add": cmd_add, "archive": cmd_archive, "check-ignore": cmd_check_ignore, "check-mailmap": cmd_check_mailmap, "clone": cmd_clone, "commit": cmd_commit, "commit-tree": cmd_commit_tree, "describe": cmd_describe, "daemon": cmd_daemon, "diff": cmd_diff, "diff-tree": cmd_diff_tree, "dump-pack": cmd_dump_pack, "dump-index": cmd_dump_index, "fetch-pack": cmd_fetch_pack, "fetch": cmd_fetch, "fsck": cmd_fsck, "help": cmd_help, "init": cmd_init, "log": cmd_log, "ls-files": cmd_ls_files, "ls-remote": cmd_ls_remote, "ls-tree": cmd_ls_tree, "pack-objects": cmd_pack_objects, "pull": cmd_pull, "push": cmd_push, "receive-pack": cmd_receive_pack, "remote": cmd_remote, "repack": cmd_repack, "reset": cmd_reset, "rev-list": cmd_rev_list, "rm": cmd_rm, "show": cmd_show, "stash": cmd_stash, "status": cmd_status, "symbolic-ref": cmd_symbolic_ref, "tag": cmd_tag, "update-server-info": cmd_update_server_info, "upload-pack": cmd_upload_pack, "web-daemon": cmd_web_daemon, "write-tree": cmd_write_tree, } def main(argv=None): if argv is None: argv = sys.argv if len(argv) < 1: print("Usage: dulwich <%s> [OPTIONS...]" % ("|".join(commands.keys()))) return 1 cmd = argv[0] try: cmd_kls = commands[cmd] except KeyError: print("No such subcommand: %s" % cmd) return 1 # TODO(jelmer): Return non-0 on errors return cmd_kls().run(argv[1:]) if __name__ == "__main__": if "DULWICH_PDB" in os.environ and getattr(signal, "SIGQUIT", None): signal.signal(signal.SIGQUIT, signal_quit) # type: ignore signal.signal(signal.SIGINT, signal_int) sys.exit(main(sys.argv[1:])) diff --git a/dulwich/client.py b/dulwich/client.py index 7e405eb5..237600da 100644 --- a/dulwich/client.py +++ b/dulwich/client.py @@ -1,2097 +1,2187 @@ # client.py -- Implementation of the client side git protocols # Copyright (C) 2008-2013 Jelmer Vernooij # # Dulwich is dual-licensed under the Apache License, Version 2.0 and the
GNU # General Public License as published by the Free Software Foundation; version 2.0 # or (at your option) any later version. You can redistribute it and/or # modify it under the terms of either of these two licenses. # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # You should have received a copy of the licenses; if not, see # <http://www.gnu.org/licenses/> for a copy of the GNU General Public License # and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache # License, Version 2.0. # """Client side support for the Git protocol. The Dulwich client supports the following capabilities: * thin-pack * multi_ack_detailed * multi_ack * side-band-64k * ofs-delta * quiet * report-status * delete-refs * shallow Known capabilities that are not supported: * no-progress * include-tag """ from contextlib import closing from io import BytesIO, BufferedReader import os import select import socket import subprocess import sys from typing import Optional, Dict, Callable, Set from urllib.parse import ( quote as urlquote, unquote as urlunquote, urlparse, urljoin, urlunsplit, urlunparse, ) import dulwich from dulwich.config import get_xdg_config_home_path from dulwich.errors import ( GitProtocolError, NotGitRepository, SendPackError, ) from dulwich.protocol import ( HangupException, _RBUFSIZE, agent_string, capability_agent, extract_capability_names, CAPABILITY_AGENT, CAPABILITY_DELETE_REFS, CAPABILITY_INCLUDE_TAG, CAPABILITY_MULTI_ACK, CAPABILITY_MULTI_ACK_DETAILED, CAPABILITY_OFS_DELTA, CAPABILITY_QUIET, CAPABILITY_REPORT_STATUS, CAPABILITY_SHALLOW, CAPABILITY_SYMREF, CAPABILITY_SIDE_BAND_64K, CAPABILITY_THIN_PACK, CAPABILITIES_REF, KNOWN_RECEIVE_CAPABILITIES, KNOWN_UPLOAD_CAPABILITIES, COMMAND_DEEPEN, COMMAND_SHALLOW, COMMAND_UNSHALLOW, COMMAND_DONE, COMMAND_HAVE, COMMAND_WANT, SIDE_BAND_CHANNEL_DATA, SIDE_BAND_CHANNEL_PROGRESS, SIDE_BAND_CHANNEL_FATAL, PktLineParser, Protocol, ProtocolFile, TCP_GIT_PORT, ZERO_SHA, extract_capabilities, parse_capability, ) from dulwich.pack import ( write_pack_data, write_pack_objects, ) from dulwich.refs import ( read_info_refs, ANNOTATED_TAG_SUFFIX, ) class InvalidWants(Exception): """Invalid wants.""" def __init__(self, wants): Exception.__init__( self, "requested wants not in server provided refs: %r" % wants ) class HTTPUnauthorized(Exception): """Raised when authentication fails.""" def __init__(self, www_authenticate, url): Exception.__init__(self, "No valid credentials provided") self.www_authenticate = www_authenticate self.url = url def _fileno_can_read(fileno): """Check if a file descriptor is readable.""" return len(select.select([fileno], [], [], 0)[0]) > 0 def _win32_peek_avail(handle): """Wrapper around PeekNamedPipe to check how many bytes are available.""" from ctypes import byref, GetLastError, wintypes, windll c_avail = wintypes.DWORD() c_message = wintypes.DWORD() success = windll.kernel32.PeekNamedPipe( handle, None, 0, None, byref(c_avail), byref(c_message) ) if not success: raise OSError(GetLastError()) return c_avail.value COMMON_CAPABILITIES = [CAPABILITY_OFS_DELTA, CAPABILITY_SIDE_BAND_64K] UPLOAD_CAPABILITIES = [ CAPABILITY_THIN_PACK, CAPABILITY_MULTI_ACK, CAPABILITY_MULTI_ACK_DETAILED, CAPABILITY_SHALLOW, ] + COMMON_CAPABILITIES RECEIVE_CAPABILITIES = [ CAPABILITY_REPORT_STATUS, CAPABILITY_DELETE_REFS, ] + COMMON_CAPABILITIES class
ReportStatusParser(object): """Handle status as reported by servers with 'report-status' capability.""" def __init__(self): self._done = False self._pack_status = None self._ref_statuses = [] def check(self): """Check if there were any errors and, if so, raise exceptions. Raises: SendPackError: Raised when the server could not unpack Returns: iterator over refs """ if self._pack_status not in (b"unpack ok", None): raise SendPackError(self._pack_status) for status in self._ref_statuses: try: status, rest = status.split(b" ", 1) except ValueError: # malformed response, move on to the next one continue if status == b"ng": ref, error = rest.split(b" ", 1) yield ref, error.decode("utf-8") elif status == b"ok": yield rest, None else: raise GitProtocolError("invalid ref status %r" % status) def handle_packet(self, pkt): """Handle a packet. Raises: GitProtocolError: Raised when packets are received after a flush packet. """ if self._done: raise GitProtocolError("received more data after status report") if pkt is None: self._done = True return if self._pack_status is None: self._pack_status = pkt.strip() else: ref_status = pkt.strip() self._ref_statuses.append(ref_status) def read_pkt_refs(proto): server_capabilities = None refs = {} # Receive refs from server for pkt in proto.read_pkt_seq(): (sha, ref) = pkt.rstrip(b"\n").split(None, 1) if sha == b"ERR": raise GitProtocolError(ref.decode("utf-8", "replace")) if server_capabilities is None: (ref, server_capabilities) = extract_capabilities(ref) refs[ref] = sha if len(refs) == 0: return {}, set([]) if refs == {CAPABILITIES_REF: ZERO_SHA}: refs = {} return refs, set(server_capabilities) class FetchPackResult(object): """Result of a fetch-pack operation. Attributes: refs: Dictionary with all remote refs symrefs: Dictionary with remote symrefs agent: User agent string """ _FORWARDED_ATTRS = [ "clear", "copy", "fromkeys", "get", "items", "keys", "pop", "popitem", "setdefault", "update", "values", "viewitems", "viewkeys", "viewvalues", ] def __init__(self, refs, symrefs, agent, new_shallow=None, new_unshallow=None): self.refs = refs self.symrefs = symrefs self.agent = agent self.new_shallow = new_shallow self.new_unshallow = new_unshallow def _warn_deprecated(self): import warnings warnings.warn( - "Use FetchPackResult.refs instead.", DeprecationWarning, stacklevel=3 + "Use FetchPackResult.refs instead.", + DeprecationWarning, + stacklevel=3, ) def __eq__(self, other): if isinstance(other, dict): self._warn_deprecated() return self.refs == other return ( self.refs == other.refs and self.symrefs == other.symrefs and self.agent == other.agent ) def __contains__(self, name): self._warn_deprecated() return name in self.refs def __getitem__(self, name): self._warn_deprecated() return self.refs[name] def __len__(self): self._warn_deprecated() return len(self.refs) def __iter__(self): self._warn_deprecated() return iter(self.refs) def __getattribute__(self, name): if name in type(self)._FORWARDED_ATTRS: self._warn_deprecated() return getattr(self.refs, name) return super(FetchPackResult, self).__getattribute__(name) def __repr__(self): return "%s(%r, %r, %r)" % ( self.__class__.__name__, self.refs, self.symrefs, self.agent, ) class SendPackResult(object): """Result of a send-pack operation.
Attributes: refs: Dictionary with all remote refs agent: User agent string ref_status: Optional dictionary mapping ref name to error message (if it failed to update), or None if it was updated successfully """ _FORWARDED_ATTRS = [ "clear", "copy", "fromkeys", "get", "items", "keys", "pop", "popitem", "setdefault", "update", "values", "viewitems", "viewkeys", "viewvalues", ] def __init__(self, refs, agent=None, ref_status=None): self.refs = refs self.agent = agent self.ref_status = ref_status def _warn_deprecated(self): import warnings warnings.warn( - "Use SendPackResult.refs instead.", DeprecationWarning, stacklevel=3 + "Use SendPackResult.refs instead.", + DeprecationWarning, + stacklevel=3, ) def __eq__(self, other): if isinstance(other, dict): self._warn_deprecated() return self.refs == other return self.refs == other.refs and self.agent == other.agent def __contains__(self, name): self._warn_deprecated() return name in self.refs def __getitem__(self, name): self._warn_deprecated() return self.refs[name] def __len__(self): self._warn_deprecated() return len(self.refs) def __iter__(self): self._warn_deprecated() return iter(self.refs) def __getattribute__(self, name): if name in type(self)._FORWARDED_ATTRS: self._warn_deprecated() return getattr(self.refs, name) return super(SendPackResult, self).__getattribute__(name) def __repr__(self): return "%s(%r, %r)" % (self.__class__.__name__, self.refs, self.agent) def _read_shallow_updates(proto): new_shallow = set() new_unshallow = set() for pkt in proto.read_pkt_seq(): cmd, sha = pkt.split(b" ", 1) if cmd == COMMAND_SHALLOW: new_shallow.add(sha.strip()) elif cmd == COMMAND_UNSHALLOW: new_unshallow.add(sha.strip()) else: raise GitProtocolError("unknown command %s" % pkt) return (new_shallow, new_unshallow) # TODO(durin42): this doesn't correctly degrade if the server doesn't # support some capabilities. This should work properly with servers # that don't support multi_ack. class GitClient(object): """Git smart server client.""" def __init__( - self, thin_packs=True, report_activity=None, quiet=False, include_tags=False + self, + thin_packs=True, + report_activity=None, + quiet=False, + include_tags=False, ): """Create a new GitClient instance. Args: thin_packs: Whether or not thin packs should be retrieved report_activity: Optional callback for reporting transport activity. include_tags: send annotated tags when sending the objects they point to """ self._report_activity = report_activity self._report_status_parser = None self._fetch_capabilities = set(UPLOAD_CAPABILITIES) self._fetch_capabilities.add(capability_agent()) self._send_capabilities = set(RECEIVE_CAPABILITIES) self._send_capabilities.add(capability_agent()) if quiet: self._send_capabilities.add(CAPABILITY_QUIET) if not thin_packs: self._fetch_capabilities.remove(CAPABILITY_THIN_PACK) if include_tags: self._fetch_capabilities.add(CAPABILITY_INCLUDE_TAG) def get_url(self, path): """Retrieves full url to given path. Args: path: Repository path (as string) Returns: Url to path (as string) """ raise NotImplementedError(self.get_url) @classmethod def from_parsedurl(cls, parsedurl, **kwargs): """Create an instance of this client from a urlparse.parsed object. Args: parsedurl: Result of urlparse() Returns: A `GitClient` object """ raise NotImplementedError(cls.from_parsedurl) def send_pack(self, path, update_refs, generate_pack_data, progress=None): """Upload a pack to a remote repository. 
Args: path: Repository path (as bytestring) update_refs: Function to determine changes to remote refs. Receive dict with existing remote refs, returns dict with changed refs (name -> sha, where sha=ZERO_SHA for deletions) generate_pack_data: Function that can return a tuple with number of objects and list of pack data to include progress: Optional progress function Returns: SendPackResult object Raises: SendPackError: if server rejects the pack data """ raise NotImplementedError(self.send_pack) def fetch(self, path, target, determine_wants=None, progress=None, depth=None): """Fetch into a target repository. Args: path: Path to fetch from (as bytestring) target: Target repository to fetch into determine_wants: Optional function to determine what refs to fetch. Receives dictionary of name->sha, should return list of shas to fetch. Defaults to all shas. progress: Optional progress function depth: Depth to fetch at Returns: Dictionary with all remote refs (not just those fetched) """ if determine_wants is None: determine_wants = target.object_store.determine_wants_all if CAPABILITY_THIN_PACK in self._fetch_capabilities: # TODO(jelmer): Avoid reading entire file into memory and # only processing it after the whole file has been fetched. f = BytesIO() def commit(): if f.tell(): f.seek(0) target.object_store.add_thin_pack(f.read, None) def abort(): pass else: f, commit, abort = target.object_store.add_pack() try: result = self.fetch_pack( path, determine_wants, target.get_graph_walker(), f.write, progress=progress, depth=depth, ) except BaseException: abort() raise else: commit() target.update_shallow(result.new_shallow, result.new_unshallow) return result def fetch_pack( - self, path, determine_wants, graph_walker, pack_data, progress=None, depth=None + self, + path, + determine_wants, + graph_walker, + pack_data, + progress=None, + depth=None, ): """Retrieve a pack from a git smart server. Args: path: Remote path to fetch from determine_wants: Function determine what refs to fetch. Receives dictionary of name->sha, should return list of shas to fetch. graph_walker: Object with next() and ack(). pack_data: Callback called for each bit of data in the pack progress: Callback for progress reports (strings) depth: Shallow fetch depth Returns: FetchPackResult object """ raise NotImplementedError(self.fetch_pack) def get_refs(self, path): """Retrieve the current refs from a git smart server. Args: path: Path to the repo to fetch from. (as bytestring) Returns: """ raise NotImplementedError(self.get_refs) def _read_side_band64k_data(self, proto, channel_callbacks): """Read per-channel data. This requires the side-band-64k capability. Args: proto: Protocol object to read from channel_callbacks: Dictionary mapping channels to packet handlers to use. None for a callback discards channel data. """ for pkt in proto.read_pkt_seq(): channel = ord(pkt[:1]) pkt = pkt[1:] try: cb = channel_callbacks[channel] except KeyError: raise AssertionError("Invalid sideband channel %d" % channel) else: if cb is not None: cb(pkt) @staticmethod def _should_send_pack(new_refs): # The packfile MUST NOT be sent if the only command used is delete. return any(sha != ZERO_SHA for sha in new_refs.values()) def _handle_receive_pack_head(self, proto, capabilities, old_refs, new_refs): """Handle the head of a 'git-receive-pack' request. 
Args: proto: Protocol object to read from capabilities: List of negotiated capabilities old_refs: Old refs, as received from the server new_refs: Refs to change Returns: (have, want) tuple """ want = [] have = [x for x in old_refs.values() if not x == ZERO_SHA] sent_capabilities = False for refname in new_refs: if not isinstance(refname, bytes): raise TypeError("refname is not a bytestring: %r" % refname) old_sha1 = old_refs.get(refname, ZERO_SHA) if not isinstance(old_sha1, bytes): raise TypeError( "old sha1 for %s is not a bytestring: %r" % (refname, old_sha1) ) new_sha1 = new_refs.get(refname, ZERO_SHA) if not isinstance(new_sha1, bytes): raise TypeError( "new sha1 for %s is not a bytestring: %r" % (refname, new_sha1) ) if old_sha1 != new_sha1: if sent_capabilities: proto.write_pkt_line(old_sha1 + b" " + new_sha1 + b" " + refname) else: proto.write_pkt_line( old_sha1 + b" " + new_sha1 + b" " + refname + b"\0" + b" ".join(sorted(capabilities)) ) sent_capabilities = True if new_sha1 not in have and new_sha1 != ZERO_SHA: want.append(new_sha1) proto.write_pkt_line(None) return (have, want) def _negotiate_receive_pack_capabilities(self, server_capabilities): negotiated_capabilities = self._send_capabilities & server_capabilities agent = None for capability in server_capabilities: k, v = parse_capability(capability) if k == CAPABILITY_AGENT: agent = v unknown_capabilities = ( # noqa: F841 extract_capability_names(server_capabilities) - KNOWN_RECEIVE_CAPABILITIES ) # TODO(jelmer): warn about unknown capabilities return negotiated_capabilities, agent def _handle_receive_pack_tail( self, proto: Protocol, capabilities: Set[bytes], progress: Callable[[bytes], None] = None, ) -> Optional[Dict[bytes, Optional[str]]]: """Handle the tail of a 'git-receive-pack' request. Args: proto: Protocol object to read from capabilities: List of negotiated capabilities progress: Optional progress reporting function Returns: dict mapping ref name to: error message if the ref failed to update None if it was updated successfully """ if CAPABILITY_SIDE_BAND_64K in capabilities: if progress is None: def progress(x): pass channel_callbacks = {2: progress} if CAPABILITY_REPORT_STATUS in capabilities: channel_callbacks[1] = PktLineParser( self._report_status_parser.handle_packet ).parse self._read_side_band64k_data(proto, channel_callbacks) else: if CAPABILITY_REPORT_STATUS in capabilities: for pkt in proto.read_pkt_seq(): self._report_status_parser.handle_packet(pkt) if self._report_status_parser is not None: return dict(self._report_status_parser.check()) return None def _negotiate_upload_pack_capabilities(self, server_capabilities): unknown_capabilities = ( # noqa: F841 extract_capability_names(server_capabilities) - KNOWN_UPLOAD_CAPABILITIES ) # TODO(jelmer): warn about unknown capabilities symrefs = {} agent = None for capability in server_capabilities: k, v = parse_capability(capability) if k == CAPABILITY_SYMREF: (src, dst) = v.split(b":", 1) symrefs[src] = dst if k == CAPABILITY_AGENT: agent = v negotiated_capabilities = self._fetch_capabilities & server_capabilities return (negotiated_capabilities, symrefs, agent) def _handle_upload_pack_head( self, proto, capabilities, graph_walker, wants, can_read, depth ): """Handle the head of a 'git-upload-pack' request.
Args: proto: Protocol object to read from capabilities: List of negotiated capabilities graph_walker: GraphWalker instance to call .ack() on wants: List of commits to fetch can_read: function that returns a boolean that indicates whether there is extra graph data to read on proto depth: Depth for request Returns: """ assert isinstance(wants, list) and isinstance(wants[0], bytes) proto.write_pkt_line( COMMAND_WANT + b" " + wants[0] + b" " + b" ".join(sorted(capabilities)) + b"\n" ) for want in wants[1:]: proto.write_pkt_line(COMMAND_WANT + b" " + want + b"\n") if depth not in (0, None) or getattr(graph_walker, "shallow", None): if CAPABILITY_SHALLOW not in capabilities: raise GitProtocolError( "server does not support shallow capability required for " "depth" ) for sha in graph_walker.shallow: proto.write_pkt_line(COMMAND_SHALLOW + b" " + sha + b"\n") if depth is not None: proto.write_pkt_line( COMMAND_DEEPEN + b" " + str(depth).encode("ascii") + b"\n" ) proto.write_pkt_line(None) if can_read is not None: (new_shallow, new_unshallow) = _read_shallow_updates(proto) else: new_shallow = new_unshallow = None else: new_shallow = new_unshallow = set() proto.write_pkt_line(None) have = next(graph_walker) while have: proto.write_pkt_line(COMMAND_HAVE + b" " + have + b"\n") if can_read is not None and can_read(): pkt = proto.read_pkt_line() parts = pkt.rstrip(b"\n").split(b" ") if parts[0] == b"ACK": graph_walker.ack(parts[1]) if parts[2] in (b"continue", b"common"): pass elif parts[2] == b"ready": break else: raise AssertionError( "%s not in ('continue', 'ready', 'common')" % parts[2] ) have = next(graph_walker) proto.write_pkt_line(COMMAND_DONE + b"\n") return (new_shallow, new_unshallow) def _handle_upload_pack_tail( self, proto, capabilities, graph_walker, pack_data, progress=None, rbufsize=_RBUFSIZE, ): """Handle the tail of a 'git-upload-pack' request. Args: proto: Protocol object to read from capabilities: List of negotiated capabilities graph_walker: GraphWalker instance to call .ack() on pack_data: Function to call with pack data progress: Optional progress reporting function rbufsize: Read buffer size Returns: """ pkt = proto.read_pkt_line() while pkt: parts = pkt.rstrip(b"\n").split(b" ") if parts[0] == b"ACK": graph_walker.ack(parts[1]) - if len(parts) < 3 or parts[2] not in (b"ready", b"continue", b"common"): + if len(parts) < 3 or parts[2] not in ( + b"ready", + b"continue", + b"common", + ): break pkt = proto.read_pkt_line() if CAPABILITY_SIDE_BAND_64K in capabilities: if progress is None: # Just ignore progress data def progress(x): pass self._read_side_band64k_data( proto, { SIDE_BAND_CHANNEL_DATA: pack_data, SIDE_BAND_CHANNEL_PROGRESS: progress, }, ) else: while True: data = proto.read(rbufsize) if data == b"": break pack_data(data) def check_wants(wants, refs): """Check that a set of wants is valid.
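    A want is valid if it matches the sha of some advertised ref; peeled
    tag entries (names ending in ANNOTATED_TAG_SUFFIX) are ignored when
    building that set, and any sha left over raises InvalidWants.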
Args: wants: Set of object SHAs to fetch refs: Refs dictionary to check against Returns: """ missing = set(wants) - { v for (k, v) in refs.items() if not k.endswith(ANNOTATED_TAG_SUFFIX) } if missing: raise InvalidWants(missing) def _remote_error_from_stderr(stderr): if stderr is None: return HangupException() lines = [line.rstrip(b"\n") for line in stderr.readlines()] for line in lines: if line.startswith(b"ERROR: "): return GitProtocolError(line[len(b"ERROR: ") :].decode("utf-8", "replace")) return HangupException(lines) class TraditionalGitClient(GitClient): """Traditional Git client.""" DEFAULT_ENCODING = "utf-8" def __init__(self, path_encoding=DEFAULT_ENCODING, **kwargs): self._remote_path_encoding = path_encoding super(TraditionalGitClient, self).__init__(**kwargs) def _connect(self, cmd, path): """Create a connection to the server. This method is abstract - concrete implementations should implement their own variant which connects to the server and returns an initialized Protocol object with the service ready for use and a can_read function which may be used to see if reads would block. Args: cmd: The git service name to which we should connect. path: The path we should pass to the service. (as bytestring) """ raise NotImplementedError() def send_pack(self, path, update_refs, generate_pack_data, progress=None): """Upload a pack to a remote repository. Args: path: Repository path (as bytestring) update_refs: Function to determine changes to remote refs. Receive dict with existing remote refs, returns dict with changed refs (name -> sha, where sha=ZERO_SHA for deletions) generate_pack_data: Function that can return a tuple with number of objects and pack data to upload. progress: Optional callback called with progress updates Returns: SendPackResult Raises: SendPackError: if server rejects the pack data """ proto, unused_can_read, stderr = self._connect(b"receive-pack", path) with proto: try: old_refs, server_capabilities = read_pkt_refs(proto) except HangupException: raise _remote_error_from_stderr(stderr) - negotiated_capabilities, agent = self._negotiate_receive_pack_capabilities( - server_capabilities - ) + ( + negotiated_capabilities, + agent, + ) = self._negotiate_receive_pack_capabilities(server_capabilities) if CAPABILITY_REPORT_STATUS in negotiated_capabilities: self._report_status_parser = ReportStatusParser() report_status_parser = self._report_status_parser try: new_refs = orig_new_refs = update_refs(dict(old_refs)) except BaseException: proto.write_pkt_line(None) raise if set(new_refs.items()).issubset(set(old_refs.items())): proto.write_pkt_line(None) return SendPackResult(new_refs, agent=agent, ref_status={}) if CAPABILITY_DELETE_REFS not in server_capabilities: # Server does not support deletions. Fail later.
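                # Rough shape of the fallback (per the send-pack flow in this
                # module): each deletion is rewritten into a local "ng" status
                # so callers get a per-ref error from the status report, and
                # the ref is dropped from new_refs before any update
                # pkt-lines are written.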
new_refs = dict(orig_new_refs) for ref, sha in orig_new_refs.items(): if sha == ZERO_SHA: if CAPABILITY_REPORT_STATUS in negotiated_capabilities: report_status_parser._ref_statuses.append( b"ng " + ref + b" remote does not support deleting refs" ) report_status_parser._ref_status_ok = False del new_refs[ref] if new_refs is None: proto.write_pkt_line(None) return SendPackResult(old_refs, agent=agent, ref_status={}) if len(new_refs) == 0 and len(orig_new_refs): # NOOP - Original new refs filtered out by policy proto.write_pkt_line(None) if report_status_parser is not None: ref_status = dict(report_status_parser.check()) else: ref_status = None return SendPackResult(old_refs, agent=agent, ref_status=ref_status) (have, want) = self._handle_receive_pack_head( proto, negotiated_capabilities, old_refs, new_refs ) pack_data_count, pack_data = generate_pack_data( - have, want, ofs_delta=(CAPABILITY_OFS_DELTA in negotiated_capabilities) + have, + want, + ofs_delta=(CAPABILITY_OFS_DELTA in negotiated_capabilities), ) if self._should_send_pack(new_refs): write_pack_data(proto.write_file(), pack_data_count, pack_data) ref_status = self._handle_receive_pack_tail( proto, negotiated_capabilities, progress ) return SendPackResult(new_refs, agent=agent, ref_status=ref_status) def fetch_pack( - self, path, determine_wants, graph_walker, pack_data, progress=None, depth=None + self, + path, + determine_wants, + graph_walker, + pack_data, + progress=None, + depth=None, ): """Retrieve a pack from a git smart server. Args: path: Remote path to fetch from determine_wants: Function determine what refs to fetch. Receives dictionary of name->sha, should return list of shas to fetch. graph_walker: Object with next() and ack(). pack_data: Callback called for each bit of data in the pack progress: Callback for progress reports (strings) depth: Shallow fetch depth Returns: FetchPackResult object """ proto, can_read, stderr = self._connect(b"upload-pack", path) with proto: try: refs, server_capabilities = read_pkt_refs(proto) except HangupException: raise _remote_error_from_stderr(stderr) ( negotiated_capabilities, symrefs, agent, ) = self._negotiate_upload_pack_capabilities(server_capabilities) if refs is None: proto.write_pkt_line(None) return FetchPackResult(refs, symrefs, agent) try: wants = determine_wants(refs) except BaseException: proto.write_pkt_line(None) raise if wants is not None: wants = [cid for cid in wants if cid != ZERO_SHA] if not wants: proto.write_pkt_line(None) return FetchPackResult(refs, symrefs, agent) (new_shallow, new_unshallow) = self._handle_upload_pack_head( proto, negotiated_capabilities, graph_walker, wants, can_read, depth=depth, ) self._handle_upload_pack_tail( - proto, negotiated_capabilities, graph_walker, pack_data, progress + proto, + negotiated_capabilities, + graph_walker, + pack_data, + progress, ) return FetchPackResult(refs, symrefs, agent, new_shallow, new_unshallow) def get_refs(self, path): """Retrieve the current refs from a git smart server.""" # stock `git ls-remote` uses upload-pack proto, _, stderr = self._connect(b"upload-pack", path) with proto: try: refs, _ = read_pkt_refs(proto) except HangupException: raise _remote_error_from_stderr(stderr) proto.write_pkt_line(None) return refs def archive( self, path, committish, write_data, progress=None, write_error=None, format=None, subdirs=None, prefix=None, ): proto, can_read, stderr = self._connect(b"upload-archive", path) with proto: if format is not None: proto.write_pkt_line(b"argument --format=" + format) 
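            # Each piece of the remote "git archive" invocation travels as
            # its own "argument <value>" pkt-line, terminated by a flush-pkt;
            # the server then answers ACK or NACK before streaming the
            # archive over the side-band channels wired up further down.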
proto.write_pkt_line(b"argument " + committish) if subdirs is not None: for subdir in subdirs: proto.write_pkt_line(b"argument " + subdir) if prefix is not None: proto.write_pkt_line(b"argument --prefix=" + prefix) proto.write_pkt_line(None) try: pkt = proto.read_pkt_line() except HangupException: raise _remote_error_from_stderr(stderr) if pkt == b"NACK\n" or pkt == b"NACK": return elif pkt == b"ACK\n" or pkt == b"ACK": pass elif pkt.startswith(b"ERR "): raise GitProtocolError(pkt[4:].rstrip(b"\n").decode("utf-8", "replace")) else: raise AssertionError("invalid response %r" % pkt) ret = proto.read_pkt_line() if ret is not None: raise AssertionError("expected pkt tail") self._read_side_band64k_data( proto, { SIDE_BAND_CHANNEL_DATA: write_data, SIDE_BAND_CHANNEL_PROGRESS: progress, SIDE_BAND_CHANNEL_FATAL: write_error, }, ) class TCPGitClient(TraditionalGitClient): """A Git Client that works over TCP directly (i.e. git://).""" def __init__(self, host, port=None, **kwargs): if port is None: port = TCP_GIT_PORT self._host = host self._port = port super(TCPGitClient, self).__init__(**kwargs) @classmethod def from_parsedurl(cls, parsedurl, **kwargs): return cls(parsedurl.hostname, port=parsedurl.port, **kwargs) def get_url(self, path): netloc = self._host if self._port is not None and self._port != TCP_GIT_PORT: netloc += ":%d" % self._port return urlunsplit(("git", netloc, path, "", "")) def _connect(self, cmd, path): if not isinstance(cmd, bytes): raise TypeError(cmd) if not isinstance(path, bytes): path = path.encode(self._remote_path_encoding) sockaddrs = socket.getaddrinfo( self._host, self._port, socket.AF_UNSPEC, socket.SOCK_STREAM ) s = None err = socket.error("no address found for %s" % self._host) for (family, socktype, proto, canonname, sockaddr) in sockaddrs: s = socket.socket(family, socktype, proto) s.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) try: s.connect(sockaddr) break except socket.error as e: err = e if s is not None: s.close() s = None if s is None: raise err # -1 means system default buffering rfile = s.makefile("rb", -1) # 0 means unbuffered wfile = s.makefile("wb", 0) def close(): rfile.close() wfile.close() s.close() proto = Protocol( - rfile.read, wfile.write, close, report_activity=self._report_activity + rfile.read, + wfile.write, + close, + report_activity=self._report_activity, ) if path.startswith(b"/~"): path = path[1:] # TODO(jelmer): Alternative to ascii? 
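        # The request line sent by send_cmd() below is a single pkt-line of
        # the form b"git-upload-pack /project.git\x00host=example.com\x00";
        # the "/~user" handling above mirrors C Git's convention for
        # home-relative paths over git://.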
proto.send_cmd(b"git-" + cmd, path, b"host=" + self._host.encode("ascii")) return proto, lambda: _fileno_can_read(s), None class SubprocessWrapper(object): """A socket-like object that talks to a subprocess via pipes.""" def __init__(self, proc): self.proc = proc self.read = BufferedReader(proc.stdout).read self.write = proc.stdin.write @property def stderr(self): return self.proc.stderr def can_read(self): if sys.platform == "win32": from msvcrt import get_osfhandle handle = get_osfhandle(self.proc.stdout.fileno()) return _win32_peek_avail(handle) != 0 else: return _fileno_can_read(self.proc.stdout.fileno()) def close(self): self.proc.stdin.close() self.proc.stdout.close() if self.proc.stderr: self.proc.stderr.close() self.proc.wait() def find_git_command(): """Find command to run for system Git (usually C Git).""" if sys.platform == "win32": # support .exe, .bat and .cmd try: # to avoid overhead import win32api except ImportError: # run through cmd.exe with some overhead return ["cmd", "/c", "git"] else: status, git = win32api.FindExecutable("git") return [git] else: return ["git"] class SubprocessGitClient(TraditionalGitClient): """Git client that talks to a server using a subprocess.""" @classmethod def from_parsedurl(cls, parsedurl, **kwargs): return cls(**kwargs) git_command = None def _connect(self, service, path): if not isinstance(service, bytes): raise TypeError(service) if isinstance(path, bytes): path = path.decode(self._remote_path_encoding) if self.git_command is None: git_command = find_git_command() argv = git_command + [service.decode("ascii"), path] p = subprocess.Popen( argv, bufsize=0, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, ) pw = SubprocessWrapper(p) return ( Protocol( - pw.read, pw.write, pw.close, report_activity=self._report_activity + pw.read, + pw.write, + pw.close, + report_activity=self._report_activity, ), pw.can_read, p.stderr, ) class LocalGitClient(GitClient): """Git Client that just uses a local Repo.""" def __init__(self, thin_packs=True, report_activity=None, config=None): """Create a new LocalGitClient instance. Args: thin_packs: Whether or not thin packs should be retrieved report_activity: Optional callback for reporting transport activity. """ self._report_activity = report_activity # Ignore the thin_packs argument def get_url(self, path): return urlunsplit(("file", "", path, "", "")) @classmethod def from_parsedurl(cls, parsedurl, **kwargs): return cls(**kwargs) @classmethod def _open_repo(cls, path): from dulwich.repo import Repo if not isinstance(path, str): path = os.fsdecode(path) return closing(Repo(path)) def send_pack(self, path, update_refs, generate_pack_data, progress=None): """Upload a pack to a remote repository. Args: path: Repository path (as bytestring) update_refs: Function to determine changes to remote refs. Receive dict with existing remote refs, returns dict with changed refs (name -> sha, where sha=ZERO_SHA for deletions) generate_pack_data: Function that can return a tuple with number of items and pack data to upload.
progress: Optional progress function Returns: SendPackResult Raises: SendPackError: if server rejects the pack data """ if not progress: def progress(x): pass with self._open_repo(path) as target: old_refs = target.get_refs() new_refs = update_refs(dict(old_refs)) have = [sha1 for sha1 in old_refs.values() if sha1 != ZERO_SHA] want = [] for refname, new_sha1 in new_refs.items(): if ( new_sha1 not in have and new_sha1 not in want and new_sha1 != ZERO_SHA ): want.append(new_sha1) if not want and set(new_refs.items()).issubset(set(old_refs.items())): return SendPackResult(new_refs, ref_status={}) target.object_store.add_pack_data( *generate_pack_data(have, want, ofs_delta=True) ) ref_status = {} for refname, new_sha1 in new_refs.items(): old_sha1 = old_refs.get(refname, ZERO_SHA) if new_sha1 != ZERO_SHA: if not target.refs.set_if_equals(refname, old_sha1, new_sha1): msg = "unable to set %s to %s" % (refname, new_sha1) progress(msg) ref_status[refname] = msg else: if not target.refs.remove_if_equals(refname, old_sha1): progress("unable to remove %s" % refname) ref_status[refname] = "unable to remove" return SendPackResult(new_refs, ref_status=ref_status) def fetch(self, path, target, determine_wants=None, progress=None, depth=None): """Fetch into a target repository. Args: path: Path to fetch from (as bytestring) target: Target repository to fetch into determine_wants: Optional function determine what refs to fetch. Receives dictionary of name->sha, should return list of shas to fetch. Defaults to all shas. progress: Optional progress function depth: Shallow fetch depth Returns: FetchPackResult object """ with self._open_repo(path) as r: refs = r.fetch( - target, determine_wants=determine_wants, progress=progress, depth=depth + target, + determine_wants=determine_wants, + progress=progress, + depth=depth, ) return FetchPackResult(refs, r.refs.get_symrefs(), agent_string()) def fetch_pack( - self, path, determine_wants, graph_walker, pack_data, progress=None, depth=None + self, + path, + determine_wants, + graph_walker, + pack_data, + progress=None, + depth=None, ): """Retrieve a pack from a git smart server. Args: path: Remote path to fetch from determine_wants: Function determine what refs to fetch. Receives dictionary of name->sha, should return list of shas to fetch. graph_walker: Object with next() and ack(). pack_data: Callback called for each bit of data in the pack progress: Callback for progress reports (strings) depth: Shallow fetch depth Returns: FetchPackResult object """ with self._open_repo(path) as r: objects_iter = r.fetch_objects( determine_wants, graph_walker, progress=progress, depth=depth ) symrefs = r.refs.get_symrefs() agent = agent_string() # Did the process short-circuit (e.g. in a stateless RPC call)? # Note that the client still expects a 0-object pack in most cases. 
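            # refs=None in the result below tells the caller that the fetch
            # short-circuited and no pack follows; otherwise the object
            # iterator is serialized through ProtocolFile so pack_data
            # receives the same byte stream a remote upload-pack would send.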
if objects_iter is None: return FetchPackResult(None, symrefs, agent) protocol = ProtocolFile(None, pack_data) write_pack_objects(protocol, objects_iter) return FetchPackResult(r.get_refs(), symrefs, agent) def get_refs(self, path): """Retrieve the current refs from a git smart server.""" with self._open_repo(path) as target: return target.get_refs() # What Git client to use for local access default_local_git_client_cls = LocalGitClient class SSHVendor(object): """A client side SSH implementation.""" def connect_ssh( - self, host, command, username=None, port=None, password=None, key_filename=None + self, + host, + command, + username=None, + port=None, + password=None, + key_filename=None, ): # This function was deprecated in 0.9.1 import warnings warnings.warn( "SSHVendor.connect_ssh has been renamed to SSHVendor.run_command", DeprecationWarning, ) return self.run_command( host, command, username=username, port=port, password=password, key_filename=key_filename, ) def run_command( - self, host, command, username=None, port=None, password=None, key_filename=None + self, + host, + command, + username=None, + port=None, + password=None, + key_filename=None, ): """Connect to an SSH server. Run a command remotely and return a file-like object for interaction with the remote command. Args: host: Host name command: Command to run (as argv array) username: Optional name of user to log in as port: Optional SSH port to use password: Optional ssh password for login or private key key_filename: Optional path to private keyfile Returns: """ raise NotImplementedError(self.run_command) class StrangeHostname(Exception): """Refusing to connect to strange SSH hostname.""" def __init__(self, hostname): super(StrangeHostname, self).__init__(hostname) class SubprocessSSHVendor(SSHVendor): """SSH vendor that shells out to the local 'ssh' command.""" def run_command( - self, host, command, username=None, port=None, password=None, key_filename=None + self, + host, + command, + username=None, + port=None, + password=None, + key_filename=None, ): if password is not None: raise NotImplementedError( "Setting password not supported by SubprocessSSHVendor." ) args = ["ssh", "-x"] if port: args.extend(["-p", str(port)]) if key_filename: args.extend(["-i", str(key_filename)]) if username: host = "%s@%s" % (username, host) if host.startswith("-"): raise StrangeHostname(hostname=host) args.append(host) proc = subprocess.Popen( args + [command], bufsize=0, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, ) return SubprocessWrapper(proc) class PLinkSSHVendor(SSHVendor): """SSH vendor that shells out to the local 'plink' command.""" def run_command( - self, host, command, username=None, port=None, password=None, key_filename=None + self, + host, + command, + username=None, + port=None, + password=None, + key_filename=None, ): if sys.platform == "win32": args = ["plink.exe", "-ssh"] else: args = ["plink", "-ssh"] if password is not None: import warnings warnings.warn( "Invoking PLink with a password exposes the password in the " "process list."
) args.extend(["-pw", str(password)]) if port: args.extend(["-P", str(port)]) if key_filename: args.extend(["-i", str(key_filename)]) if username: host = "%s@%s" % (username, host) if host.startswith("-"): raise StrangeHostname(hostname=host) args.append(host) proc = subprocess.Popen( args + [command], bufsize=0, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, ) return SubprocessWrapper(proc) def ParamikoSSHVendor(**kwargs): import warnings warnings.warn( "ParamikoSSHVendor has been moved to dulwich.contrib.paramiko_vendor.", DeprecationWarning, ) from dulwich.contrib.paramiko_vendor import ParamikoSSHVendor return ParamikoSSHVendor(**kwargs) # Can be overridden by users get_ssh_vendor = SubprocessSSHVendor class SSHGitClient(TraditionalGitClient): def __init__( self, host, port=None, username=None, vendor=None, config=None, password=None, key_filename=None, **kwargs ): self.host = host self.port = port self.username = username self.password = password self.key_filename = key_filename super(SSHGitClient, self).__init__(**kwargs) self.alternative_paths = {} if vendor is not None: self.ssh_vendor = vendor else: self.ssh_vendor = get_ssh_vendor() def get_url(self, path): netloc = self.host if self.port is not None: netloc += ":%d" % self.port if self.username is not None: netloc = urlquote(self.username, "@/:") + "@" + netloc return urlunsplit(("ssh", netloc, path, "", "")) @classmethod def from_parsedurl(cls, parsedurl, **kwargs): return cls( host=parsedurl.hostname, port=parsedurl.port, username=parsedurl.username, **kwargs ) def _get_cmd_path(self, cmd): cmd = self.alternative_paths.get(cmd, b"git-" + cmd) assert isinstance(cmd, bytes) return cmd def _connect(self, cmd, path): if not isinstance(cmd, bytes): raise TypeError(cmd) if isinstance(path, bytes): path = path.decode(self._remote_path_encoding) if path.startswith("/~"): path = path[1:] argv = ( self._get_cmd_path(cmd).decode(self._remote_path_encoding) + " '" + path + "'" ) kwargs = {} if self.password is not None: kwargs["password"] = self.password if self.key_filename is not None: kwargs["key_filename"] = self.key_filename con = self.ssh_vendor.run_command( self.host, argv, port=self.port, username=self.username, **kwargs ) return ( Protocol( - con.read, con.write, con.close, report_activity=self._report_activity + con.read, + con.write, + con.close, + report_activity=self._report_activity, ), con.can_read, getattr(con, "stderr", None), ) def default_user_agent_string(): # Start user agent with "git/", because GitHub requires this. :-( See # https://github.com/jelmer/dulwich/issues/562 for details. return "git/dulwich/%s" % ".".join([str(x) for x in dulwich.__version__]) -def default_urllib3_manager( +def default_urllib3_manager( # noqa: C901 config, pool_manager_cls=None, proxy_manager_cls=None, **override_kwargs ): """Return `urllib3` connection pool manager. Honour detected proxy configurations. Args: config: `dulwich.config.ConfigDict` instance with Git configuration. kwargs: Additional arguments for urllib3.ProxyManager Returns: `proxy_manager_cls` (defaults to `urllib3.ProxyManager`) instance for proxy configurations, `pool_manager_cls` (defaults to `urllib3.PoolManager`) instance otherwise.
""" proxy_server = user_agent = None ca_certs = ssl_verify = None if proxy_server is None: for proxyname in ("https_proxy", "http_proxy", "all_proxy"): proxy_server = os.environ.get(proxyname) if proxy_server is not None: break if config is not None: if proxy_server is None: try: proxy_server = config.get(b"http", b"proxy") except KeyError: pass try: user_agent = config.get(b"http", b"useragent") except KeyError: pass # TODO(jelmer): Support per-host settings try: ssl_verify = config.get_boolean(b"http", b"sslVerify") except KeyError: ssl_verify = True try: ca_certs = config.get(b"http", b"sslCAInfo") except KeyError: ca_certs = None if user_agent is None: user_agent = default_user_agent_string() headers = {"User-agent": user_agent} kwargs = {} if ssl_verify is True: kwargs["cert_reqs"] = "CERT_REQUIRED" elif ssl_verify is False: kwargs["cert_reqs"] = "CERT_NONE" else: # Default to SSL verification kwargs["cert_reqs"] = "CERT_REQUIRED" if ca_certs is not None: kwargs["ca_certs"] = ca_certs kwargs.update(override_kwargs) # Try really hard to find a SSL certificate path if "ca_certs" not in kwargs and kwargs.get("cert_reqs") != "CERT_NONE": try: import certifi except ImportError: pass else: kwargs["ca_certs"] = certifi.where() import urllib3 if proxy_server is not None: if proxy_manager_cls is None: proxy_manager_cls = urllib3.ProxyManager # `urllib3` requires a `str` object in both Python 2 and 3, while # `ConfigDict` coerces entries to `bytes` on Python 3. Compensate. if not isinstance(proxy_server, str): proxy_server = proxy_server.decode() manager = proxy_manager_cls(proxy_server, headers=headers, **kwargs) else: if pool_manager_cls is None: pool_manager_cls = urllib3.PoolManager manager = pool_manager_cls(headers=headers, **kwargs) return manager class HttpGitClient(GitClient): def __init__( self, base_url, dumb=None, pool_manager=None, config=None, username=None, password=None, **kwargs ): self._base_url = base_url.rstrip("/") + "/" self._username = username self._password = password self.dumb = dumb if pool_manager is None: self.pool_manager = default_urllib3_manager(config) else: self.pool_manager = pool_manager if username is not None: # No escaping needed: ":" is not allowed in username: # https://tools.ietf.org/html/rfc2617#section-2 credentials = "%s:%s" % (username, password) import urllib3.util basic_auth = urllib3.util.make_headers(basic_auth=credentials) self.pool_manager.headers.update(basic_auth) GitClient.__init__(self, **kwargs) def get_url(self, path): return self._get_url(path).rstrip("/") @classmethod def from_parsedurl(cls, parsedurl, **kwargs): password = parsedurl.password if password is not None: kwargs["password"] = urlunquote(password) username = parsedurl.username if username is not None: kwargs["username"] = urlunquote(username) netloc = parsedurl.hostname if parsedurl.port: netloc = "%s:%s" % (netloc, parsedurl.port) if parsedurl.username: netloc = "%s@%s" % (parsedurl.username, netloc) parsedurl = parsedurl._replace(netloc=netloc) return cls(urlunparse(parsedurl), **kwargs) def __repr__(self): - return "%s(%r, dumb=%r)" % (type(self).__name__, self._base_url, self.dumb) + return "%s(%r, dumb=%r)" % ( + type(self).__name__, + self._base_url, + self.dumb, + ) def _get_url(self, path): if not isinstance(path, str): # urllib3.util.url._encode_invalid_chars() converts the path back # to bytes using the utf-8 codec. 
path = path.decode("utf-8") return urljoin(self._base_url, path).rstrip("/") + "/" def _http_request(self, url, headers=None, data=None, allow_compression=False): """Perform HTTP request. Args: url: Request URL. headers: Optional custom headers to override defaults. data: Request data. allow_compression: Allow GZipped communication. Returns: Tuple (`response`, `read`), where response is an `urllib3` response object with additional `content_type` and `redirect_location` properties, and `read` is a consumable read method for the response data. """ req_headers = self.pool_manager.headers.copy() if headers is not None: req_headers.update(headers) req_headers["Pragma"] = "no-cache" if allow_compression: req_headers["Accept-Encoding"] = "gzip" else: req_headers["Accept-Encoding"] = "identity" if data is None: resp = self.pool_manager.request("GET", url, headers=req_headers) else: resp = self.pool_manager.request( "POST", url, headers=req_headers, body=data ) if resp.status == 404: raise NotGitRepository() if resp.status == 401: raise HTTPUnauthorized(resp.getheader("WWW-Authenticate"), url) if resp.status != 200: raise GitProtocolError( "unexpected http resp %d for %s" % (resp.status, url) ) # TODO: Optimization available by adding `preload_content=False` to the # request and just passing the `read` method on instead of going via # `BytesIO`, if we can guarantee that the entire response is consumed # before issuing the next to still allow for connection reuse from the # pool. read = BytesIO(resp.data).read resp.content_type = resp.getheader("Content-Type") # Check if geturl() is available (urllib3 version >= 1.23) try: resp_url = resp.geturl() except AttributeError: # get_redirect_location() is available for urllib3 >= 1.1 resp.redirect_location = resp.get_redirect_location() else: resp.redirect_location = resp_url if resp_url != url else "" return resp, read def _discover_references(self, service, base_url): assert base_url[-1] == "/" tail = "info/refs" headers = {"Accept": "*/*"} if self.dumb is not True: tail += "?service=%s" % service.decode("ascii") url = urljoin(base_url, tail) resp, read = self._http_request(url, headers, allow_compression=True) if resp.redirect_location: # Something changed (redirect!), so let's update the base URL if not resp.redirect_location.endswith(tail): raise GitProtocolError( "Redirected from URL %s to URL %s without %s" % (url, resp.redirect_location, tail) ) base_url = resp.redirect_location[: -len(tail)] try: self.dumb = not resp.content_type.startswith("application/x-git-") if not self.dumb: proto = Protocol(read, None) # The first line should mention the service try: [pkt] = list(proto.read_pkt_seq()) except ValueError: raise GitProtocolError("unexpected number of packets received") if pkt.rstrip(b"\n") != (b"# service=" + service): raise GitProtocolError( "unexpected first line %r from smart server" % pkt ) return read_pkt_refs(proto) + (base_url,) else: return read_info_refs(resp), set(), base_url finally: resp.close() def _smart_request(self, service, url, data): assert url[-1] == "/" url = urljoin(url, service) result_content_type = "application/x-%s-result" % service headers = { "Content-Type": "application/x-%s-request" % service, "Accept": result_content_type, "Content-Length": str(len(data)), } resp, read = self._http_request(url, headers, data) if resp.content_type != result_content_type: raise GitProtocolError( "Invalid content-type from server: %s" % resp.content_type ) return resp, read def send_pack(self, path, update_refs, generate_pack_data, 
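# For reference, the "# service=" announcement validated above, reproduced as
# a self-contained pkt-line sketch (the byte string is a hand-built sample):
from io import BytesIO
from dulwich.protocol import Protocol

raw = b"001e# service=git-upload-pack\n0000"
proto = Protocol(BytesIO(raw).read, None)
[pkt] = list(proto.read_pkt_seq())
assert pkt.rstrip(b"\n") == b"# service=git-upload-pack"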
progress=None): """Upload a pack to a remote repository. Args: path: Repository path (as bytestring) update_refs: Function to determine changes to remote refs. Receives dict with existing remote refs, returns dict with changed refs (name -> sha, where sha=ZERO_SHA for deletions) generate_pack_data: Function that can return a tuple with number of elements and pack data to upload. progress: Optional progress function Returns: SendPackResult Raises: SendPackError: if server rejects the pack data """ url = self._get_url(path) old_refs, server_capabilities, url = self._discover_references( b"git-receive-pack", url ) - negotiated_capabilities, agent = self._negotiate_receive_pack_capabilities( - server_capabilities - ) + ( + negotiated_capabilities, + agent, + ) = self._negotiate_receive_pack_capabilities(server_capabilities) negotiated_capabilities.add(capability_agent()) if CAPABILITY_REPORT_STATUS in negotiated_capabilities: self._report_status_parser = ReportStatusParser() new_refs = update_refs(dict(old_refs)) if new_refs is None: # Determine wants function is aborting the push. return SendPackResult(old_refs, agent=agent, ref_status={}) if set(new_refs.items()).issubset(set(old_refs.items())): return SendPackResult(new_refs, agent=agent, ref_status={}) if self.dumb: raise NotImplementedError(self.fetch_pack) req_data = BytesIO() req_proto = Protocol(None, req_data.write) (have, want) = self._handle_receive_pack_head( req_proto, negotiated_capabilities, old_refs, new_refs ) pack_data_count, pack_data = generate_pack_data( - have, want, ofs_delta=(CAPABILITY_OFS_DELTA in negotiated_capabilities) + have, + want, + ofs_delta=(CAPABILITY_OFS_DELTA in negotiated_capabilities), ) if self._should_send_pack(new_refs): write_pack_data(req_proto.write_file(), pack_data_count, pack_data) resp, read = self._smart_request( "git-receive-pack", url, data=req_data.getvalue() ) try: resp_proto = Protocol(read, None) ref_status = self._handle_receive_pack_tail( resp_proto, negotiated_capabilities, progress ) return SendPackResult(new_refs, agent=agent, ref_status=ref_status) finally: resp.close() def fetch_pack( - self, path, determine_wants, graph_walker, pack_data, progress=None, depth=None + self, + path, + determine_wants, + graph_walker, + pack_data, + progress=None, + depth=None, ): """Retrieve a pack from a git smart server. Args: path: Path to fetch from determine_wants: Callback that returns list of commits to fetch graph_walker: Object with next() and ack(). 
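# A hedged usage sketch of the send_pack() flow above (remote URL, repository
# and branch are hypothetical):
from dulwich.client import HttpGitClient
from dulwich.repo import Repo

local = Repo(".")
client = HttpGitClient("https://git.example.com/project.git")

def update_refs(old_refs):
    # Push the local master branch; every other remote ref is left untouched.
    return {b"refs/heads/master": local.refs[b"refs/heads/master"]}

result = client.send_pack(b"/project.git", update_refs, local.generate_pack_data)
print(result.ref_status)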
pack_data: Callback called for each bit of data in the pack progress: Callback for progress reports (strings) depth: Depth for request Returns: FetchPackResult object """ url = self._get_url(path) refs, server_capabilities, url = self._discover_references( b"git-upload-pack", url ) ( negotiated_capabilities, symrefs, agent, ) = self._negotiate_upload_pack_capabilities(server_capabilities) wants = determine_wants(refs) if wants is not None: wants = [cid for cid in wants if cid != ZERO_SHA] if not wants: return FetchPackResult(refs, symrefs, agent) if self.dumb: raise NotImplementedError(self.fetch_pack) req_data = BytesIO() req_proto = Protocol(None, req_data.write) (new_shallow, new_unshallow) = self._handle_upload_pack_head( req_proto, negotiated_capabilities, graph_walker, wants, can_read=None, depth=depth, ) resp, read = self._smart_request( "git-upload-pack", url, data=req_data.getvalue() ) try: resp_proto = Protocol(read, None) if new_shallow is None and new_unshallow is None: (new_shallow, new_unshallow) = _read_shallow_updates(resp_proto) self._handle_upload_pack_tail( - resp_proto, negotiated_capabilities, graph_walker, pack_data, progress + resp_proto, + negotiated_capabilities, + graph_walker, + pack_data, + progress, ) return FetchPackResult(refs, symrefs, agent, new_shallow, new_unshallow) finally: resp.close() def get_refs(self, path): """Retrieve the current refs from a git smart server.""" url = self._get_url(path) refs, _, _ = self._discover_references(b"git-upload-pack", url) return refs def get_transport_and_path_from_url(url, config=None, **kwargs): """Obtain a git client from a URL. Args: url: URL to open (a unicode string) config: Optional config object thin_packs: Whether or not thin packs should be retrieved report_activity: Optional callback for reporting transport activity. Returns: Tuple with client instance and relative path. """ parsed = urlparse(url) if parsed.scheme == "git": return (TCPGitClient.from_parsedurl(parsed, **kwargs), parsed.path) elif parsed.scheme in ("git+ssh", "ssh"): return SSHGitClient.from_parsedurl(parsed, **kwargs), parsed.path elif parsed.scheme in ("http", "https"): return ( HttpGitClient.from_parsedurl(parsed, config=config, **kwargs), parsed.path, ) elif parsed.scheme == "file": return ( default_local_git_client_cls.from_parsedurl(parsed, **kwargs), parsed.path, ) raise ValueError("unknown scheme '%s'" % parsed.scheme) def parse_rsync_url(location): """Parse a rsync-style URL.""" if ":" in location and "@" not in location: # SSH with no user@, zero or one leading slash. (host, path) = location.split(":", 1) user = None elif ":" in location: # SSH with user@host:foo. user_host, path = location.split(":", 1) if "@" in user_host: user, host = user_host.rsplit("@", 1) else: user = None host = user_host else: raise ValueError("not a valid rsync-style URL") return (user, host, path) def get_transport_and_path(location, **kwargs): """Obtain a git client from a URL. Args: location: URL or path (a string) config: Optional config object thin_packs: Whether or not thin packs should be retrieved report_activity: Optional callback for reporting transport activity. Returns: Tuple with client instance and relative path. 
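# The scheme dispatch above, in practice (URLs hypothetical):
from dulwich.client import get_transport_and_path_from_url

client, path = get_transport_and_path_from_url("https://git.example.com/project.git")
# -> (HttpGitClient, "/project.git")
client, path = get_transport_and_path_from_url("ssh://git@git.example.com/srv/repo.git")
# -> (SSHGitClient, "/srv/repo.git")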
""" # First, try to parse it as a URL try: return get_transport_and_path_from_url(location, **kwargs) except ValueError: pass if sys.platform == "win32" and location[0].isalpha() and location[1:3] == ":\\": # Windows local path return default_local_git_client_cls(**kwargs), location try: (username, hostname, path) = parse_rsync_url(location) except ValueError: # Otherwise, assume it's a local path. return default_local_git_client_cls(**kwargs), location else: return SSHGitClient(hostname, username=username, **kwargs), path DEFAULT_GIT_CREDENTIALS_PATHS = [ os.path.expanduser("~/.git-credentials"), get_xdg_config_home_path("git", "credentials"), ] def get_credentials_from_store( scheme, hostname, username=None, fnames=DEFAULT_GIT_CREDENTIALS_PATHS ): for fname in fnames: try: with open(fname, "rb") as f: for line in f: parsed_line = urlparse(line.strip()) if ( parsed_line.scheme == scheme and parsed_line.hostname == hostname and (username is None or parsed_line.username == username) ): return parsed_line.username, parsed_line.password except FileNotFoundError: # If the file doesn't exist, try the next one. continue diff --git a/dulwich/contrib/swift.py b/dulwich/contrib/swift.py index ea757b9d..b407ecfb 100644 --- a/dulwich/contrib/swift.py +++ b/dulwich/contrib/swift.py @@ -1,1076 +1,1079 @@ # swift.py -- Repo implementation atop OpenStack SWIFT # Copyright (C) 2013 eNovance SAS # # Author: Fabien Boucher # # Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU # General Public License as public by the Free Software Foundation; version 2.0 # or (at your option) any later version. You can redistribute it and/or # modify it under the terms of either of these two licenses. # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # You should have received a copy of the licenses; if not, see # for a copy of the GNU General Public License # and for a copy of the Apache # License, Version 2.0. # """Repo implementation atop OpenStack SWIFT.""" # TODO: Refactor to share more code with dulwich/repo.py. 
# TODO(fbo): Second attempt to _send() must be notified via real log # TODO(fbo): More logs for operations import os import stat import zlib import tempfile import posixpath import urllib.parse as urlparse from io import BytesIO from configparser import ConfigParser from geventhttpclient import HTTPClient from dulwich.greenthreads import ( GreenThreadsMissingObjectFinder, GreenThreadsObjectStoreIterator, ) from dulwich.lru_cache import LRUSizeCache from dulwich.objects import ( Blob, Commit, Tree, Tag, S_ISGITLINK, ) from dulwich.object_store import ( PackBasedObjectStore, PACKDIR, INFODIR, ) from dulwich.pack import ( PackData, Pack, PackIndexer, PackStreamCopier, write_pack_header, compute_file_sha, iter_sha1, write_pack_index_v2, load_pack_index_file, read_pack_header, _compute_object_size, unpack_object, write_pack_object, ) from dulwich.protocol import TCP_GIT_PORT from dulwich.refs import ( InfoRefsContainer, read_info_refs, write_info_refs, ) from dulwich.repo import ( BaseRepo, OBJECTDIR, ) from dulwich.server import ( Backend, TCPGitServer, ) import json import sys """ # Configuration file sample [swift] # Authentication URL (Keystone or Swift) auth_url = http://127.0.0.1:5000/v2.0 # Authentication version to use auth_ver = 2 # The tenant and username separated by a semicolon username = admin;admin # The user password password = pass # The Object storage region to use (auth v2) (Default RegionOne) region_name = RegionOne # The Object storage endpoint URL to use (auth v2) (Default internalURL) endpoint_type = internalURL # Concurrency to use for parallel tasks (Default 10) concurrency = 10 # Size of the HTTP pool (Default 10) http_pool_length = 10 # Timeout delay for HTTP connections (Default 20) http_timeout = 20 # Chunk size to read from pack (Bytes) (Default 12228) chunk_length = 12228 # Cache size (MBytes) (Default 20) cache_length = 20 """ class PackInfoObjectStoreIterator(GreenThreadsObjectStoreIterator): def __len__(self): while len(self.finder.objects_to_send): for _ in range(0, len(self.finder.objects_to_send)): sha = self.finder.next() self._shas.append(sha) return len(self._shas) class PackInfoMissingObjectFinder(GreenThreadsMissingObjectFinder): def next(self): while True: if not self.objects_to_send: return None (sha, name, leaf) = self.objects_to_send.pop() if sha not in self.sha_done: break if not leaf: info = self.object_store.pack_info_get(sha) if info[0] == Commit.type_num: self.add_todo([(info[2], "", False)]) elif info[0] == Tree.type_num: self.add_todo([tuple(i) for i in info[1]]) elif info[0] == Tag.type_num: self.add_todo([(info[1], None, False)]) if sha in self._tagged: self.add_todo([(self._tagged[sha], None, True)]) self.sha_done.add(sha) self.progress("counting objects: %d\r" % len(self.sha_done)) return (sha, name) def load_conf(path=None, file=None): """Load the configuration and return a `ConfigParser` instance Args: path: The path to the configuration file file: If provided, read from this file-like object instead """ conf = ConfigParser() if file: try: conf.read_file(file, path) except AttributeError: # read_file only exists in Python3 conf.readfp(file) return conf confpath = None if not path: try: confpath = os.environ["DULWICH_SWIFT_CFG"] except KeyError: raise Exception("You need to specify a configuration file") else: confpath = path if not os.path.isfile(confpath): raise Exception("Unable to read configuration file %s" % confpath) conf.read(confpath) return conf
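# A hedged sketch: load the sample configuration above from a file-like
# object (the values are the sample defaults; in production DULWICH_SWIFT_CFG
# usually points at a real file instead):
from io import StringIO
from dulwich.contrib.swift import load_conf

sample = "[swift]\nauth_url = http://127.0.0.1:5000/v2.0\nauth_ver = 2\nusername = admin;admin\npassword = pass\n"
conf = load_conf(file=StringIO(sample))
print(conf.get("swift", "auth_ver"))  # -> "2"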
def swift_load_pack_index(scon, filename): """Read a pack index file from Swift Args: scon: a `SwiftConnector` instance filename: Path to the index file object Returns: a `PackIndex` instance """ with scon.get_object(filename) as f: return load_pack_index_file(filename, f) def pack_info_create(pack_data, pack_index): pack = Pack.from_objects(pack_data, pack_index) info = {} for obj in pack.iterobjects(): # Commit if obj.type_num == Commit.type_num: info[obj.id] = (obj.type_num, obj.parents, obj.tree) # Tree elif obj.type_num == Tree.type_num: shas = [ (s, n, not stat.S_ISDIR(m)) for n, m, s in obj.items() if not S_ISGITLINK(m) ] info[obj.id] = (obj.type_num, shas) # Blob elif obj.type_num == Blob.type_num: info[obj.id] = None # Tag elif obj.type_num == Tag.type_num: info[obj.id] = (obj.type_num, obj.object[1]) # json.dumps() returns text; encode to bytes before compressing return zlib.compress(json.dumps(info).encode("utf-8")) def load_pack_info(filename, scon=None, file=None): if not file: f = scon.get_object(filename) else: f = file if not f: return None try: return json.loads(zlib.decompress(f.read())) finally: f.close() class SwiftException(Exception): pass class SwiftConnector(object): """A connector to Swift that manages authentication and error handling""" def __init__(self, root, conf): """Initialize a SwiftConnector Args: root: The swift container that will act as the Git bare repository conf: A ConfigParser Object """ self.conf = conf self.auth_ver = self.conf.get("swift", "auth_ver") if self.auth_ver not in ["1", "2"]: raise NotImplementedError("Wrong authentication version, use either 1 or 2") self.auth_url = self.conf.get("swift", "auth_url") self.user = self.conf.get("swift", "username") self.password = self.conf.get("swift", "password") self.concurrency = self.conf.getint("swift", "concurrency") or 10 self.http_timeout = self.conf.getint("swift", "http_timeout") or 20 self.http_pool_length = self.conf.getint("swift", "http_pool_length") or 10 self.region_name = self.conf.get("swift", "region_name") or "RegionOne" self.endpoint_type = self.conf.get("swift", "endpoint_type") or "internalURL" self.cache_length = self.conf.getint("swift", "cache_length") or 20 self.chunk_length = self.conf.getint("swift", "chunk_length") or 12228 self.root = root block_size = 1024 * 12 # 12KB if self.auth_ver == "1": self.storage_url, self.token = self.swift_auth_v1() else: self.storage_url, self.token = self.swift_auth_v2() token_header = {"X-Auth-Token": str(self.token)} self.httpclient = HTTPClient.from_url( str(self.storage_url), concurrency=self.http_pool_length, block_size=block_size, connection_timeout=self.http_timeout, network_timeout=self.http_timeout, headers=token_header, ) self.base_path = str( posixpath.join(urlparse.urlparse(self.storage_url).path, self.root) ) def swift_auth_v1(self): self.user = self.user.replace(";", ":") auth_httpclient = HTTPClient.from_url( self.auth_url, connection_timeout=self.http_timeout, network_timeout=self.http_timeout, ) headers = {"X-Auth-User": self.user, "X-Auth-Key": self.password} path = urlparse.urlparse(self.auth_url).path ret = auth_httpclient.request("GET", path, headers=headers) # Should do something with redirections (301 in my case) if ret.status_code < 200 or ret.status_code >= 300: raise SwiftException( "AUTH v1.0 request failed on " + "%s with error code %s (%s)" % ( str(auth_httpclient.get_base_url()) + path, ret.status_code, str(ret.items()), ) ) storage_url = ret["X-Storage-Url"] token = ret["X-Auth-Token"] return storage_url, token
"tenantName": self.tenant, } auth_json = json.dumps(auth_dict) headers = {"Content-Type": "application/json"} auth_httpclient = HTTPClient.from_url( self.auth_url, connection_timeout=self.http_timeout, network_timeout=self.http_timeout, ) path = urlparse.urlparse(self.auth_url).path if not path.endswith("tokens"): path = posixpath.join(path, "tokens") ret = auth_httpclient.request("POST", path, body=auth_json, headers=headers) if ret.status_code < 200 or ret.status_code >= 300: raise SwiftException( "AUTH v2.0 request failed on " + "%s with error code %s (%s)" % ( str(auth_httpclient.get_base_url()) + path, ret.status_code, str(ret.items()), ) ) auth_ret_json = json.loads(ret.read()) token = auth_ret_json["access"]["token"]["id"] catalogs = auth_ret_json["access"]["serviceCatalog"] object_store = [ o_store for o_store in catalogs if o_store["type"] == "object-store" ][0] endpoints = object_store["endpoints"] endpoint = [endp for endp in endpoints if endp["region"] == self.region_name][0] return endpoint[self.endpoint_type], token def test_root_exists(self): """Check that Swift container exist Returns: True if exist or None it not """ ret = self.httpclient.request("HEAD", self.base_path) if ret.status_code == 404: return None if ret.status_code < 200 or ret.status_code > 300: raise SwiftException( "HEAD request failed with error code %s" % ret.status_code ) return True def create_root(self): """Create the Swift container Raises: SwiftException: if unable to create """ if not self.test_root_exists(): ret = self.httpclient.request("PUT", self.base_path) if ret.status_code < 200 or ret.status_code > 300: raise SwiftException( "PUT request failed with error code %s" % ret.status_code ) def get_container_objects(self): """Retrieve objects list in a container Returns: A list of dict that describe objects or None if container does not exist """ qs = "?format=json" path = self.base_path + qs ret = self.httpclient.request("GET", path) if ret.status_code == 404: return None if ret.status_code < 200 or ret.status_code > 300: raise SwiftException( "GET request failed with error code %s" % ret.status_code ) content = ret.read() return json.loads(content) def get_object_stat(self, name): """Retrieve object stat Args: name: The object name Returns: A dict that describe the object or None if object does not exist """ path = self.base_path + "/" + name ret = self.httpclient.request("HEAD", path) if ret.status_code == 404: return None if ret.status_code < 200 or ret.status_code > 300: raise SwiftException( "HEAD request failed with error code %s" % ret.status_code ) resp_headers = {} for header, value in ret.items(): resp_headers[header.lower()] = value return resp_headers def put_object(self, name, content): """Put an object Args: name: The object name content: A file object Raises: SwiftException: if unable to create """ content.seek(0) data = content.read() path = self.base_path + "/" + name headers = {"Content-Length": str(len(data))} def _send(): ret = self.httpclient.request("PUT", path, body=data, headers=headers) return ret try: # Sometime got Broken Pipe - Dirty workaround ret = _send() except Exception: # Second attempt work ret = _send() if ret.status_code < 200 or ret.status_code > 300: raise SwiftException( "PUT request failed with error code %s" % ret.status_code ) def get_object(self, name, range=None): """Retrieve an object Args: name: The object name range: A string range like "0-10" to retrieve specified bytes in object content Returns: A file like instance or bytestring if range is 
def get_object(self, name, range=None): """Retrieve an object Args: name: The object name range: A string range like "0-10" to retrieve specified bytes in object content Returns: A file-like instance, or a bytestring if range is specified """ headers = {} if range: headers["Range"] = "bytes=%s" % range path = self.base_path + "/" + name ret = self.httpclient.request("GET", path, headers=headers) if ret.status_code == 404: return None if ret.status_code < 200 or ret.status_code > 300: raise SwiftException( "GET request failed with error code %s" % ret.status_code ) content = ret.read() if range: return content return BytesIO(content) def del_object(self, name): """Delete an object Args: name: The object name Raises: SwiftException: if unable to delete """ path = self.base_path + "/" + name ret = self.httpclient.request("DELETE", path) if ret.status_code < 200 or ret.status_code > 300: raise SwiftException( "DELETE request failed with error code %s" % ret.status_code ) def del_root(self): """Delete the root container by removing container content Raises: SwiftException: if unable to delete """ for obj in self.get_container_objects(): self.del_object(obj["name"]) ret = self.httpclient.request("DELETE", self.base_path) if ret.status_code < 200 or ret.status_code > 300: raise SwiftException( "DELETE request failed with error code %s" % ret.status_code ) class SwiftPackReader(object): """A SwiftPackReader that mimics the read and seek methods The reader can read a specified number of bytes from a given offset of a Swift object. A read offset is kept internally. The reader fills its internal buffer by fetching data from Swift as needed; chunk_length specifies the amount of data to read from Swift per request. """ def __init__(self, scon, filename, pack_length): """Initialize a SwiftPackReader Args: scon: a `SwiftConnector` instance filename: the pack filename pack_length: The size of the pack object """ self.scon = scon self.filename = filename self.pack_length = pack_length self.offset = 0 self.base_offset = 0 self.buff = b"" self.buff_length = self.scon.chunk_length def _read(self, more=False): if more: self.buff_length = self.buff_length * 2 offset = self.base_offset r = min(self.base_offset + self.buff_length, self.pack_length) ret = self.scon.get_object(self.filename, range="%s-%s" % (offset, r)) self.buff = ret def read(self, length): """Read a specified number of bytes from the pack object Args: length: amount of bytes to read Returns: a bytestring """ end = self.offset + length if self.base_offset + end > self.pack_length: data = self.buff[self.offset :] self.offset = end return data if end > len(self.buff): # Need to read more from swift self._read(more=True) return self.read(length) data = self.buff[self.offset : end] self.offset = end return data def seek(self, offset): """Seek to a specified offset Args: offset: the offset to seek to """ self.base_offset = offset self._read() self.offset = 0 def read_checksum(self): """Read the checksum from the pack Returns: the checksum bytestring """ return self.scon.get_object(self.filename, range="-20")
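# A hedged, self-contained sketch of the buffered reads above, using an
# in-memory stand-in for SwiftConnector (only the pieces SwiftPackReader
# touches; importing dulwich.contrib.swift requires the gevent extras):
from dulwich.contrib.swift import SwiftPackReader

class StubConnector(object):
    chunk_length = 16  # tiny buffer so refills are exercised

    def __init__(self, data):
        self.data = data

    def get_object(self, name, range=None):
        left, right = range.split("-")
        if not left:
            return self.data[-int(right):]
        return self.data[int(left):int(right)]

data = bytes(range(64))
reader = SwiftPackReader(StubConnector(data), "pack", len(data))
reader.seek(12)                        # e.g. skip a 12-byte pack header
assert reader.read(32) == data[12:44]  # buffer refilled transparently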
""" def __init__(self, scon, filename): """Initialize a SwiftPackReader Args: scon: a `SwiftConnector` instance filename: the pack filename """ self.scon = scon self._filename = filename self._header_size = 12 headers = self.scon.get_object_stat(self._filename) self.pack_length = int(headers["content-length"]) pack_reader = SwiftPackReader(self.scon, self._filename, self.pack_length) (version, self._num_objects) = read_pack_header(pack_reader.read) self._offset_cache = LRUSizeCache( - 1024 * 1024 * self.scon.cache_length, compute_size=_compute_object_size + 1024 * 1024 * self.scon.cache_length, + compute_size=_compute_object_size, ) self.pack = None def get_object_at(self, offset): if offset in self._offset_cache: return self._offset_cache[offset] assert offset >= self._header_size pack_reader = SwiftPackReader(self.scon, self._filename, self.pack_length) pack_reader.seek(offset) unpacked, _ = unpack_object(pack_reader.read) return (unpacked.pack_type_num, unpacked._obj()) def get_stored_checksum(self): pack_reader = SwiftPackReader(self.scon, self._filename, self.pack_length) return pack_reader.read_checksum() def close(self): pass class SwiftPack(Pack): """A Git pack object. Same implementation as pack.Pack except that _idx_load and _data_load are bounded to Swift version of load_pack_index and PackData. """ def __init__(self, *args, **kwargs): self.scon = kwargs["scon"] del kwargs["scon"] super(SwiftPack, self).__init__(*args, **kwargs) self._pack_info_path = self._basename + ".info" self._pack_info = None self._pack_info_load = lambda: load_pack_info(self._pack_info_path, self.scon) self._idx_load = lambda: swift_load_pack_index(self.scon, self._idx_path) self._data_load = lambda: SwiftPackData(self.scon, self._data_path) @property def pack_info(self): """The pack data object being used.""" if self._pack_info is None: self._pack_info = self._pack_info_load() return self._pack_info class SwiftObjectStore(PackBasedObjectStore): """A Swift Object Store Allow to manage a bare Git repository from Openstack Swift. This object store only supports pack files and not loose objects. """ def __init__(self, scon): """Open a Swift object store. Args: scon: A `SwiftConnector` instance """ super(SwiftObjectStore, self).__init__() self.scon = scon self.root = self.scon.root self.pack_dir = posixpath.join(OBJECTDIR, PACKDIR) self._alternates = None def _update_pack_cache(self): objects = self.scon.get_container_objects() pack_files = [ o["name"].replace(".pack", "") for o in objects if o["name"].endswith(".pack") ] ret = [] for basename in pack_files: pack = SwiftPack(basename, scon=self.scon) self._pack_cache[basename] = pack ret.append(pack) return ret def _iter_loose_objects(self): """Loose objects are not supported by this repository""" return [] def iter_shas(self, finder): """An iterator over pack's ObjectStore. 
Returns: an `ObjectStoreIterator` or `GreenThreadsObjectStoreIterator` instance if gevent is enabled """ shas = iter(finder.next, None) return PackInfoObjectStoreIterator(self, shas, finder, self.scon.concurrency) def find_missing_objects(self, *args, **kwargs): kwargs["concurrency"] = self.scon.concurrency return PackInfoMissingObjectFinder(self, *args, **kwargs) def pack_info_get(self, sha): for pack in self.packs: if sha in pack: return pack.pack_info[sha] def _collect_ancestors(self, heads, common=set()): def _find_parents(commit): for pack in self.packs: if commit in pack: try: parents = pack.pack_info[commit][1] except KeyError: # Seems to have no parents return [] return parents bases = set() commits = set() queue = [] queue.extend(heads) while queue: e = queue.pop(0) if e in common: bases.add(e) elif e not in commits: commits.add(e) parents = _find_parents(e) queue.extend(parents) return (commits, bases) def add_pack(self): """Add a new pack to this object store. Returns: File object to write to and a commit function to call when the pack is finished. """ f = BytesIO() def commit(): f.seek(0) pack = PackData(file=f, filename="") entries = pack.sorted_entries() if len(entries): basename = posixpath.join( - self.pack_dir, "pack-%s" % iter_sha1(entry[0] for entry in entries) + self.pack_dir, + "pack-%s" % iter_sha1(entry[0] for entry in entries), ) index = BytesIO() write_pack_index_v2(index, entries, pack.get_stored_checksum()) self.scon.put_object(basename + ".pack", f) f.close() self.scon.put_object(basename + ".idx", index) index.close() final_pack = SwiftPack(basename, scon=self.scon) final_pack.check_length_and_checksum() self._add_cached_pack(basename, final_pack) return final_pack else: return None def abort(): pass return f, commit, abort def add_object(self, obj): self.add_objects( [ (obj, None), ] ) def _pack_cache_stale(self): return False def _get_loose_object(self, sha): return None def add_thin_pack(self, read_all, read_some): """Read a thin pack. Read it from a stream and complete it in a temporary file. Then the pack and the corresponding index file are uploaded to Swift. """ fd, path = tempfile.mkstemp(prefix="tmp_pack_") f = os.fdopen(fd, "w+b") try: indexer = PackIndexer(f, resolve_ext_ref=self.get_raw) copier = PackStreamCopier(read_all, read_some, f, delta_iter=indexer) copier.verify() return self._complete_thin_pack(f, path, copier, indexer) finally: f.close() os.unlink(path)
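# A hedged sketch of the add_pack() contract above (store is a
# SwiftObjectStore and my_blobs is a hypothetical object list): write a pack
# into the buffer, then commit() uploads the .pack/.idx pair to Swift.
from dulwich.pack import write_pack_objects

f, commit, abort = store.add_pack()
try:
    write_pack_objects(f, [(blob, None) for blob in my_blobs])
except BaseException:
    abort()
    raise
else:
    new_pack = commit()  # returns the new SwiftPack, or None if empty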
def _complete_thin_pack(self, f, path, copier, indexer): entries = list(indexer) # Update the header with the new number of objects. f.seek(0) write_pack_header(f, len(entries) + len(indexer.ext_refs())) # Must flush before reading (http://bugs.python.org/issue3207) f.flush() # Rescan the rest of the pack, computing the SHA with the new header. new_sha = compute_file_sha(f, end_ofs=-20) # Must reposition before writing (http://bugs.python.org/issue3207) f.seek(0, os.SEEK_CUR) # Complete the pack. for ext_sha in indexer.ext_refs(): assert len(ext_sha) == 20 type_num, data = self.get_raw(ext_sha) offset = f.tell() crc32 = write_pack_object(f, type_num, data, sha=new_sha) entries.append((ext_sha, offset, crc32)) pack_sha = new_sha.digest() f.write(pack_sha) f.flush() # Move the pack in. entries.sort() pack_base_name = posixpath.join( - self.pack_dir, "pack-" + os.fsdecode(iter_sha1(e[0] for e in entries)) + self.pack_dir, + "pack-" + os.fsdecode(iter_sha1(e[0] for e in entries)), ) self.scon.put_object(pack_base_name + ".pack", f) # Write the index. filename = pack_base_name + ".idx" index_file = BytesIO() write_pack_index_v2(index_file, entries, pack_sha) self.scon.put_object(filename, index_file) # Write pack info. f.seek(0) pack_data = PackData(filename="", file=f) index_file.seek(0) pack_index = load_pack_index_file("", index_file) serialized_pack_info = pack_info_create(pack_data, pack_index) f.close() index_file.close() pack_info_file = BytesIO(serialized_pack_info) filename = pack_base_name + ".info" self.scon.put_object(filename, pack_info_file) pack_info_file.close() # Add the pack to the store and return it. final_pack = SwiftPack(pack_base_name, scon=self.scon) final_pack.check_length_and_checksum() self._add_cached_pack(pack_base_name, final_pack) return final_pack class SwiftInfoRefsContainer(InfoRefsContainer): """Manage references in info/refs object.""" def __init__(self, scon, store): self.scon = scon self.filename = "info/refs" self.store = store f = self.scon.get_object(self.filename) if not f: f = BytesIO(b"") super(SwiftInfoRefsContainer, self).__init__(f) def _load_check_ref(self, name, old_ref): self._check_refname(name) f = self.scon.get_object(self.filename) if not f: return {} refs = read_info_refs(f) if old_ref is not None: if refs[name] != old_ref: return False return refs def _write_refs(self, refs): f = BytesIO() f.writelines(write_info_refs(refs, self.store)) self.scon.put_object(self.filename, f) def set_if_equals(self, name, old_ref, new_ref): """Set a refname to new_ref only if it currently equals old_ref.""" if name == "HEAD": return True refs = self._load_check_ref(name, old_ref) if not isinstance(refs, dict): return False refs[name] = new_ref self._write_refs(refs) self._refs[name] = new_ref return True def remove_if_equals(self, name, old_ref): """Remove a refname only if it currently equals old_ref.""" if name == "HEAD": return True refs = self._load_check_ref(name, old_ref) if not isinstance(refs, dict): return False del refs[name] self._write_refs(refs) del self._refs[name] return True def allkeys(self): try: self._refs["HEAD"] = self._refs["refs/heads/master"] except KeyError: pass return self._refs.keys() class SwiftRepo(BaseRepo): def __init__(self, root, conf): """Init a bare Git repository on top of a Swift container. References are managed in info/refs objects by `SwiftInfoRefsContainer`. The root attribute is the Swift container that contains the bare Git repository. Args: root: The container which contains the bare repo conf: A ConfigParser object """ self.root = root.lstrip("/") self.conf = conf self.scon = SwiftConnector(self.root, self.conf) objects = self.scon.get_container_objects() if not objects: raise Exception("There is no Git repository here: %s" % self.root) objects = [o["name"].split("/")[0] for o in objects] if OBJECTDIR not in objects: raise Exception("This repository (%s) is not bare." % self.root) self.bare = True self._controldir = self.root object_store = SwiftObjectStore(self.scon) refs = SwiftInfoRefsContainer(self.scon, object_store) BaseRepo.__init__(self, object_store, refs) def _determine_file_mode(self): """Probe the file-system to determine whether permissions can be trusted. Returns: True if permissions can be trusted, False otherwise. """ return False
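# A hedged end-to-end sketch (config path and container name hypothetical;
# requires the gevent/geventhttpclient extras): create a bare repository in
# Swift with init_bare (defined just below), then reopen it.
from dulwich.contrib import swift

conf = swift.load_conf("/etc/dulwich-swift.cfg")
scon = swift.SwiftConnector("myrepo", conf)
repo = swift.SwiftRepo.init_bare(scon, conf)
same_repo = swift.SwiftRepo("myrepo", conf)
print(same_repo.refs.allkeys())
# The same operations are exposed on the command line (entry points below):
#   python -m dulwich.contrib.swift init -c /etc/dulwich-swift.cfg myrepo
#   python -m dulwich.contrib.swift daemon -c /etc/dulwich-swift.cfg -l 127.0.0.1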
""" return False def _put_named_file(self, filename, contents): """Put an object in a Swift container Args: filename: the path to the object to put on Swift contents: the content as bytestring """ with BytesIO() as f: f.write(contents) self.scon.put_object(filename, f) @classmethod def init_bare(cls, scon, conf): """Create a new bare repository. Args: scon: a `SwiftConnector` instance conf: a ConfigParser object Returns: a `SwiftRepo` instance """ scon.create_root() for obj in [ posixpath.join(OBJECTDIR, PACKDIR), posixpath.join(INFODIR, "refs"), ]: scon.put_object(obj, BytesIO(b"")) ret = cls(scon.root, conf) ret._init_files(True) return ret class SwiftSystemBackend(Backend): def __init__(self, logger, conf): self.conf = conf self.logger = logger def open_repository(self, path): self.logger.info("opening repository at %s", path) return SwiftRepo(path, self.conf) def cmd_daemon(args): """Entry point for starting a TCP git server.""" import optparse parser = optparse.OptionParser() parser.add_option( "-l", "--listen_address", dest="listen_address", default="127.0.0.1", help="Binding IP address.", ) parser.add_option( "-p", "--port", dest="port", type=int, default=TCP_GIT_PORT, help="Binding TCP port.", ) parser.add_option( "-c", "--swift_config", dest="swift_config", default="", help="Path to the configuration file for Swift backend.", ) options, args = parser.parse_args(args) try: import gevent import geventhttpclient # noqa: F401 except ImportError: print( "gevent and geventhttpclient libraries are mandatory " " for use the Swift backend." ) sys.exit(1) import gevent.monkey gevent.monkey.patch_socket() from dulwich import log_utils logger = log_utils.getLogger(__name__) conf = load_conf(options.swift_config) backend = SwiftSystemBackend(logger, conf) log_utils.default_logging_config() server = TCPGitServer(backend, options.listen_address, port=options.port) server.serve_forever() def cmd_init(args): import optparse parser = optparse.OptionParser() parser.add_option( "-c", "--swift_config", dest="swift_config", default="", help="Path to the configuration file for Swift backend.", ) options, args = parser.parse_args(args) conf = load_conf(options.swift_config) if args == []: parser.error("missing repository name") repo = args[0] scon = SwiftConnector(repo, conf) SwiftRepo.init_bare(scon, conf) def main(argv=sys.argv): commands = { "init": cmd_init, "daemon": cmd_daemon, } if len(sys.argv) < 2: print("Usage: %s <%s> [OPTIONS...]" % (sys.argv[0], "|".join(commands.keys()))) sys.exit(1) cmd = sys.argv[1] if cmd not in commands: print("No such subcommand: %s" % cmd) sys.exit(1) commands[cmd](sys.argv[2:]) if __name__ == "__main__": main() diff --git a/dulwich/contrib/test_swift.py b/dulwich/contrib/test_swift.py index 357d942a..35f6ba7c 100644 --- a/dulwich/contrib/test_swift.py +++ b/dulwich/contrib/test_swift.py @@ -1,489 +1,500 @@ # test_swift.py -- Unittests for the Swift backend. # Copyright (C) 2013 eNovance SAS # # Author: Fabien Boucher # # Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU # General Public License as public by the Free Software Foundation; version 2.0 # or (at your option) any later version. You can redistribute it and/or # modify it under the terms of either of these two licenses. # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
# See the License for the specific language governing permissions and # limitations under the License. # # You should have received a copy of the licenses; if not, see # for a copy of the GNU General Public License # and for a copy of the Apache # License, Version 2.0. # """Tests for dulwich.contrib.swift.""" import posixpath from time import time from io import BytesIO, StringIO from unittest import skipIf from dulwich.tests import ( TestCase, ) from dulwich.tests.test_object_store import ( ObjectStoreTests, ) from dulwich.objects import ( Blob, Commit, Tree, Tag, parse_timezone, ) import json missing_libs = [] try: import gevent # noqa:F401 except ImportError: missing_libs.append("gevent") try: import geventhttpclient # noqa:F401 except ImportError: missing_libs.append("geventhttpclient") try: from unittest.mock import patch except ImportError: missing_libs.append("mock") skipmsg = "Required libraries are not installed (%r)" % missing_libs if not missing_libs: from dulwich.contrib import swift config_file = """[swift] auth_url = http://127.0.0.1:8080/auth/%(version_str)s auth_ver = %(version_int)s username = test;tester password = testing region_name = %(region_name)s endpoint_type = %(endpoint_type)s concurrency = %(concurrency)s chunk_length = %(chunk_length)s cache_length = %(cache_length)s http_pool_length = %(http_pool_length)s http_timeout = %(http_timeout)s """ def_config_file = { "version_str": "v1.0", "version_int": 1, "concurrency": 1, "chunk_length": 12228, "cache_length": 1, "region_name": "test", "endpoint_type": "internalURL", "http_pool_length": 1, "http_timeout": 1, } def create_swift_connector(store={}): return lambda root, conf: FakeSwiftConnector(root, conf=conf, store=store) class Response(object): def __init__(self, headers={}, status=200, content=None): self.headers = headers self.status_code = status self.content = content def __getitem__(self, key): return self.headers[key] def items(self): return self.headers.items() def read(self): return self.content def fake_auth_request_v1(*args, **kwargs): ret = Response( { "X-Storage-Url": "http://127.0.0.1:8080/v1.0/AUTH_fakeuser", "X-Auth-Token": "12" * 10, }, 200, ) return ret def fake_auth_request_v1_error(*args, **kwargs): ret = Response({}, 401) return ret def fake_auth_request_v2(*args, **kwargs): s_url = "http://127.0.0.1:8080/v1.0/AUTH_fakeuser" resp = { "access": { "token": {"id": "12" * 10}, "serviceCatalog": [ { "type": "object-store", "endpoints": [ { "region": "test", "internalURL": s_url, }, ], }, ], } } ret = Response(status=200, content=json.dumps(resp)) return ret def create_commit(data, marker=b"Default", blob=None): if not blob: blob = Blob.from_string(b"The blob content " + marker) tree = Tree() tree.add(b"thefile_" + marker, 0o100644, blob.id) cmt = Commit() if data: assert isinstance(data[-1], Commit) cmt.parents = [data[-1].id] cmt.tree = tree.id author = b"John Doe " + marker + b" " cmt.author = cmt.committer = author tz = parse_timezone(b"-0200")[0] cmt.commit_time = cmt.author_time = int(time()) cmt.commit_timezone = cmt.author_timezone = tz cmt.encoding = b"UTF-8" cmt.message = b"The commit message " + marker tag = Tag() tag.tagger = b"john@doe.net" tag.message = b"Annotated tag" tag.tag_timezone = parse_timezone(b"-0200")[0] tag.tag_time = cmt.author_time tag.object = (Commit, cmt.id) tag.name = b"v_" + marker + b"_0.1" return blob, tree, tag, cmt def create_commits(length=1, marker=b"Default"): data = [] for i in range(0, length): _marker = ("%s_%s" % (marker, i)).encode() blob, tree, tag, cmt 
= create_commit(data, _marker) data.extend([blob, tree, tag, cmt]) return data @skipIf(missing_libs, skipmsg) class FakeSwiftConnector(object): def __init__(self, root, conf, store=None): if store: self.store = store else: self.store = {} self.conf = conf self.root = root self.concurrency = 1 self.chunk_length = 12228 self.cache_length = 1 def put_object(self, name, content): name = posixpath.join(self.root, name) if hasattr(content, "seek"): content.seek(0) content = content.read() self.store[name] = content def get_object(self, name, range=None): name = posixpath.join(self.root, name) if not range: try: return BytesIO(self.store[name]) except KeyError: return None else: l, r = range.split("-") try: if not l: r = -int(r) return self.store[name][r:] else: return self.store[name][int(l) : int(r)] except KeyError: return None def get_container_objects(self): return [{"name": k.replace(self.root + "/", "")} for k in self.store] def create_root(self): if self.root in self.store.keys(): pass else: self.store[self.root] = "" def get_object_stat(self, name): name = posixpath.join(self.root, name) if name not in self.store: return None return {"content-length": len(self.store[name])} @skipIf(missing_libs, skipmsg) class TestSwiftRepo(TestCase): def setUp(self): super(TestSwiftRepo, self).setUp() self.conf = swift.load_conf(file=StringIO(config_file % def_config_file)) def test_init(self): store = {"fakerepo/objects/pack": ""} with patch( "dulwich.contrib.swift.SwiftConnector", new_callable=create_swift_connector, store=store, ): swift.SwiftRepo("fakerepo", conf=self.conf) def test_init_no_data(self): with patch( - "dulwich.contrib.swift.SwiftConnector", new_callable=create_swift_connector + "dulwich.contrib.swift.SwiftConnector", + new_callable=create_swift_connector, ): self.assertRaises(Exception, swift.SwiftRepo, "fakerepo", self.conf) def test_init_bad_data(self): store = {"fakerepo/.git/objects/pack": ""} with patch( "dulwich.contrib.swift.SwiftConnector", new_callable=create_swift_connector, store=store, ): self.assertRaises(Exception, swift.SwiftRepo, "fakerepo", self.conf) def test_put_named_file(self): store = {"fakerepo/objects/pack": ""} with patch( "dulwich.contrib.swift.SwiftConnector", new_callable=create_swift_connector, store=store, ): repo = swift.SwiftRepo("fakerepo", conf=self.conf) desc = b"Fake repo" repo._put_named_file("description", desc) self.assertEqual(repo.scon.store["fakerepo/description"], desc) def test_init_bare(self): fsc = FakeSwiftConnector("fakeroot", conf=self.conf) with patch( "dulwich.contrib.swift.SwiftConnector", new_callable=create_swift_connector, store=fsc.store, ): swift.SwiftRepo.init_bare(fsc, conf=self.conf) self.assertIn("fakeroot/objects/pack", fsc.store) self.assertIn("fakeroot/info/refs", fsc.store) self.assertIn("fakeroot/description", fsc.store) @skipIf(missing_libs, skipmsg) class TestSwiftInfoRefsContainer(TestCase): def setUp(self): super(TestSwiftInfoRefsContainer, self).setUp() content = ( b"22effb216e3a82f97da599b8885a6cadb488b4c5\trefs/heads/master\n" b"cca703b0e1399008b53a1a236d6b4584737649e4\trefs/heads/dev" ) self.store = {"fakerepo/info/refs": content} self.conf = swift.load_conf(file=StringIO(config_file % def_config_file)) self.fsc = FakeSwiftConnector("fakerepo", conf=self.conf) self.object_store = {} def test_init(self): """info/refs does not exist""" irc = swift.SwiftInfoRefsContainer(self.fsc, self.object_store) self.assertEqual(len(irc._refs), 0) self.fsc.store = self.store irc = swift.SwiftInfoRefsContainer(self.fsc,
self.object_store) self.assertIn(b"refs/heads/dev", irc.allkeys()) self.assertIn(b"refs/heads/master", irc.allkeys()) def test_set_if_equals(self): self.fsc.store = self.store irc = swift.SwiftInfoRefsContainer(self.fsc, self.object_store) irc.set_if_equals( - b"refs/heads/dev", b"cca703b0e1399008b53a1a236d6b4584737649e4", b"1" * 40 + b"refs/heads/dev", + b"cca703b0e1399008b53a1a236d6b4584737649e4", + b"1" * 40, ) self.assertEqual(irc[b"refs/heads/dev"], b"1" * 40) def test_remove_if_equals(self): self.fsc.store = self.store irc = swift.SwiftInfoRefsContainer(self.fsc, self.object_store) irc.remove_if_equals( b"refs/heads/dev", b"cca703b0e1399008b53a1a236d6b4584737649e4" ) self.assertNotIn(b"refs/heads/dev", irc.allkeys()) @skipIf(missing_libs, skipmsg) class TestSwiftConnector(TestCase): def setUp(self): super(TestSwiftConnector, self).setUp() self.conf = swift.load_conf(file=StringIO(config_file % def_config_file)) with patch("geventhttpclient.HTTPClient.request", fake_auth_request_v1): self.conn = swift.SwiftConnector("fakerepo", conf=self.conf) def test_init_connector(self): self.assertEqual(self.conn.auth_ver, "1") self.assertEqual(self.conn.auth_url, "http://127.0.0.1:8080/auth/v1.0") self.assertEqual(self.conn.user, "test:tester") self.assertEqual(self.conn.password, "testing") self.assertEqual(self.conn.root, "fakerepo") self.assertEqual( self.conn.storage_url, "http://127.0.0.1:8080/v1.0/AUTH_fakeuser" ) self.assertEqual(self.conn.token, "12" * 10) self.assertEqual(self.conn.http_timeout, 1) self.assertEqual(self.conn.http_pool_length, 1) self.assertEqual(self.conn.concurrency, 1) self.conf.set("swift", "auth_ver", "2") self.conf.set("swift", "auth_url", "http://127.0.0.1:8080/auth/v2.0") with patch("geventhttpclient.HTTPClient.request", fake_auth_request_v2): conn = swift.SwiftConnector("fakerepo", conf=self.conf) self.assertEqual(conn.user, "tester") self.assertEqual(conn.tenant, "test") self.conf.set("swift", "auth_ver", "1") self.conf.set("swift", "auth_url", "http://127.0.0.1:8080/auth/v1.0") with patch("geventhttpclient.HTTPClient.request", fake_auth_request_v1_error): self.assertRaises( swift.SwiftException, lambda: swift.SwiftConnector("fakerepo", conf=self.conf), ) def test_root_exists(self): with patch("geventhttpclient.HTTPClient.request", lambda *args: Response()): self.assertEqual(self.conn.test_root_exists(), True) def test_root_not_exists(self): with patch( - "geventhttpclient.HTTPClient.request", lambda *args: Response(status=404) + "geventhttpclient.HTTPClient.request", + lambda *args: Response(status=404), ): self.assertEqual(self.conn.test_root_exists(), None) def test_create_root(self): with patch( - "dulwich.contrib.swift.SwiftConnector.test_root_exists", lambda *args: None + "dulwich.contrib.swift.SwiftConnector.test_root_exists", + lambda *args: None, ): with patch("geventhttpclient.HTTPClient.request", lambda *args: Response()): self.assertEqual(self.conn.create_root(), None) def test_create_root_fails(self): with patch( - "dulwich.contrib.swift.SwiftConnector.test_root_exists", lambda *args: None + "dulwich.contrib.swift.SwiftConnector.test_root_exists", + lambda *args: None, ): with patch( "geventhttpclient.HTTPClient.request", lambda *args: Response(status=404), ): self.assertRaises(swift.SwiftException, lambda: self.conn.create_root()) def test_get_container_objects(self): with patch( "geventhttpclient.HTTPClient.request", lambda *args: Response( content=json.dumps((({"name": "a"}, {"name": "b"}))) ), ): 
self.assertEqual(len(self.conn.get_container_objects()), 2) def test_get_container_objects_fails(self): with patch( - "geventhttpclient.HTTPClient.request", lambda *args: Response(status=404) + "geventhttpclient.HTTPClient.request", + lambda *args: Response(status=404), ): self.assertEqual(self.conn.get_container_objects(), None) def test_get_object_stat(self): with patch( "geventhttpclient.HTTPClient.request", lambda *args: Response(headers={"content-length": "10"}), ): self.assertEqual(self.conn.get_object_stat("a")["content-length"], "10") def test_get_object_stat_fails(self): with patch( - "geventhttpclient.HTTPClient.request", lambda *args: Response(status=404) + "geventhttpclient.HTTPClient.request", + lambda *args: Response(status=404), ): self.assertEqual(self.conn.get_object_stat("a"), None) def test_put_object(self): with patch( - "geventhttpclient.HTTPClient.request", lambda *args, **kwargs: Response() + "geventhttpclient.HTTPClient.request", + lambda *args, **kwargs: Response(), ): self.assertEqual(self.conn.put_object("a", BytesIO(b"content")), None) def test_put_object_fails(self): with patch( "geventhttpclient.HTTPClient.request", lambda *args, **kwargs: Response(status=400), ): self.assertRaises( swift.SwiftException, lambda: self.conn.put_object("a", BytesIO(b"content")), ) def test_get_object(self): with patch( "geventhttpclient.HTTPClient.request", lambda *args, **kwargs: Response(content=b"content"), ): self.assertEqual(self.conn.get_object("a").read(), b"content") with patch( "geventhttpclient.HTTPClient.request", lambda *args, **kwargs: Response(content=b"content"), ): self.assertEqual(self.conn.get_object("a", range="0-6"), b"content") def test_get_object_fails(self): with patch( "geventhttpclient.HTTPClient.request", lambda *args, **kwargs: Response(status=404), ): self.assertEqual(self.conn.get_object("a"), None) def test_del_object(self): with patch("geventhttpclient.HTTPClient.request", lambda *args: Response()): self.assertEqual(self.conn.del_object("a"), None) def test_del_root(self): with patch( - "dulwich.contrib.swift.SwiftConnector.del_object", lambda *args: None + "dulwich.contrib.swift.SwiftConnector.del_object", + lambda *args: None, ): with patch( "dulwich.contrib.swift.SwiftConnector." "get_container_objects", lambda *args: ({"name": "a"}, {"name": "b"}), ): with patch( - "geventhttpclient.HTTPClient.request", lambda *args: Response() + "geventhttpclient.HTTPClient.request", + lambda *args: Response(), ): self.assertEqual(self.conn.del_root(), None) @skipIf(missing_libs, skipmsg) class SwiftObjectStoreTests(ObjectStoreTests, TestCase): def setUp(self): TestCase.setUp(self) conf = swift.load_conf(file=StringIO(config_file % def_config_file)) fsc = FakeSwiftConnector("fakerepo", conf=conf) self.store = swift.SwiftObjectStore(fsc) diff --git a/dulwich/contrib/test_swift_smoke.py b/dulwich/contrib/test_swift_smoke.py index 8ddbcdd9..ac04a6bb 100644 --- a/dulwich/contrib/test_swift_smoke.py +++ b/dulwich/contrib/test_swift_smoke.py @@ -1,311 +1,313 @@ # test_smoke.py -- Functional tests for the Swift backend. # Copyright (C) 2013 eNovance SAS # # Author: Fabien Boucher # # Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU # General Public License as public by the Free Software Foundation; version 2.0 # or (at your option) any later version. You can redistribute it and/or # modify it under the terms of either of these two licenses. 
# # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # You should have received a copy of the licenses; if not, see # <http://www.gnu.org/licenses/> for a copy of the GNU General Public License # and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache # License, Version 2.0. # """Start functional tests. A Swift installation must be available before starting these tests. The account and authentication method used during these functional tests must be changed in the configuration file passed as an environment variable. The container used to create a fake repository is defined in cls.fakerepo and will be deleted after the tests. DULWICH_SWIFT_CFG=/tmp/conf.cfg PYTHONPATH=. python -m unittest \ dulwich.tests_swift.test_smoke """ import os import unittest import tempfile import shutil import gevent from gevent import monkey monkey.patch_all() from dulwich import ( # noqa:E402 server, repo, index, client, objects, ) from dulwich.contrib import swift # noqa:E402 class DulwichServer: """Start the TCPGitServer with Swift backend""" def __init__(self, backend, port): self.port = port self.backend = backend def run(self): self.server = server.TCPGitServer(self.backend, "localhost", port=self.port) self.job = gevent.spawn(self.server.serve_forever) def stop(self): self.server.shutdown() gevent.joinall((self.job,)) class SwiftSystemBackend(server.Backend): def open_repository(self, path): return swift.SwiftRepo(path, conf=swift.load_conf()) class SwiftRepoSmokeTest(unittest.TestCase): @classmethod def setUpClass(cls): cls.backend = SwiftSystemBackend() cls.port = 9148 cls.server_address = "localhost" cls.fakerepo = "fakerepo" cls.th_server = DulwichServer(cls.backend, cls.port) cls.th_server.run() cls.conf = swift.load_conf() @classmethod def tearDownClass(cls): cls.th_server.stop() def setUp(self): self.scon = swift.SwiftConnector(self.fakerepo, self.conf) if self.scon.test_root_exists(): try: self.scon.del_root() except swift.SwiftException: pass self.temp_d = tempfile.mkdtemp() if os.path.isdir(self.temp_d): shutil.rmtree(self.temp_d) def tearDown(self): if self.scon.test_root_exists(): try: self.scon.del_root() except swift.SwiftException: pass if os.path.isdir(self.temp_d): shutil.rmtree(self.temp_d) def test_init_bare(self): swift.SwiftRepo.init_bare(self.scon, self.conf) self.assertTrue(self.scon.test_root_exists()) obj = self.scon.get_container_objects() filtered = [ o for o in obj if o["name"] == "info/refs" or o["name"] == "objects/pack" ] self.assertEqual(len(filtered), 2) def test_clone_bare(self): local_repo = repo.Repo.init(self.temp_d, mkdir=True) swift.SwiftRepo.init_bare(self.scon, self.conf) tcp_client = client.TCPGitClient(self.server_address, port=self.port) remote_refs = tcp_client.fetch(self.fakerepo, local_repo) # The remote repo is empty (no refs retrieved) self.assertEqual(remote_refs, None) def test_push_commit(self): def determine_wants(*args): return {"refs/heads/master": local_repo.refs["HEAD"]} local_repo = repo.Repo.init(self.temp_d, mkdir=True) # Nothing in the staging area local_repo.do_commit("Test commit", "fbo@localhost") sha = local_repo.refs.read_loose_ref("refs/heads/master") swift.SwiftRepo.init_bare(self.scon, self.conf) tcp_client = client.TCPGitClient(self.server_address, port=self.port) tcp_client.send_pack( self.fakerepo, determine_wants,
local_repo.generate_pack_data ) swift_repo = swift.SwiftRepo("fakerepo", self.conf) remote_sha = swift_repo.refs.read_loose_ref("refs/heads/master") self.assertEqual(sha, remote_sha) def test_push_branch(self): def determine_wants(*args): return {"refs/heads/mybranch": local_repo.refs["refs/heads/mybranch"]} local_repo = repo.Repo.init(self.temp_d, mkdir=True) # Nothing in the staging area local_repo.do_commit("Test commit", "fbo@localhost", ref="refs/heads/mybranch") sha = local_repo.refs.read_loose_ref("refs/heads/mybranch") swift.SwiftRepo.init_bare(self.scon, self.conf) tcp_client = client.TCPGitClient(self.server_address, port=self.port) tcp_client.send_pack( "/fakerepo", determine_wants, local_repo.generate_pack_data ) swift_repo = swift.SwiftRepo(self.fakerepo, self.conf) remote_sha = swift_repo.refs.read_loose_ref("refs/heads/mybranch") self.assertEqual(sha, remote_sha) def test_push_multiple_branch(self): def determine_wants(*args): return { "refs/heads/mybranch": local_repo.refs["refs/heads/mybranch"], "refs/heads/master": local_repo.refs["refs/heads/master"], "refs/heads/pullr-108": local_repo.refs["refs/heads/pullr-108"], } local_repo = repo.Repo.init(self.temp_d, mkdir=True) # Nothing in the staging area local_shas = {} remote_shas = {} for branch in ("master", "mybranch", "pullr-108"): local_shas[branch] = local_repo.do_commit( - "Test commit %s" % branch, "fbo@localhost", ref="refs/heads/%s" % branch + "Test commit %s" % branch, + "fbo@localhost", + ref="refs/heads/%s" % branch, ) swift.SwiftRepo.init_bare(self.scon, self.conf) tcp_client = client.TCPGitClient(self.server_address, port=self.port) tcp_client.send_pack( self.fakerepo, determine_wants, local_repo.generate_pack_data ) swift_repo = swift.SwiftRepo("fakerepo", self.conf) for branch in ("master", "mybranch", "pullr-108"): remote_shas[branch] = swift_repo.refs.read_loose_ref( "refs/heads/%s" % branch ) self.assertDictEqual(local_shas, remote_shas) def test_push_data_branch(self): def determine_wants(*args): return {"refs/heads/master": local_repo.refs["HEAD"]} local_repo = repo.Repo.init(self.temp_d, mkdir=True) os.mkdir(os.path.join(self.temp_d, "dir")) files = ("testfile", "testfile2", "dir/testfile3") i = 0 for f in files: open(os.path.join(self.temp_d, f), "w").write("DATA %s" % i) i += 1 local_repo.stage(files) local_repo.do_commit("Test commit", "fbo@localhost", ref="refs/heads/master") swift.SwiftRepo.init_bare(self.scon, self.conf) tcp_client = client.TCPGitClient(self.server_address, port=self.port) tcp_client.send_pack( self.fakerepo, determine_wants, local_repo.generate_pack_data ) swift_repo = swift.SwiftRepo("fakerepo", self.conf) commit_sha = swift_repo.refs.read_loose_ref("refs/heads/master") otype, data = swift_repo.object_store.get_raw(commit_sha) commit = objects.ShaFile.from_raw_string(otype, data) otype, data = swift_repo.object_store.get_raw(commit._tree) tree = objects.ShaFile.from_raw_string(otype, data) objs = tree.items() objs_ = [] for tree_entry in objs: objs_.append(swift_repo.object_store.get_raw(tree_entry.sha)) # Blob self.assertEqual(objs_[1][1], "DATA 0") self.assertEqual(objs_[2][1], "DATA 1") # Tree self.assertEqual(objs_[0][0], 2) def test_clone_then_push_data(self): self.test_push_data_branch() shutil.rmtree(self.temp_d) local_repo = repo.Repo.init(self.temp_d, mkdir=True) tcp_client = client.TCPGitClient(self.server_address, port=self.port) remote_refs = tcp_client.fetch(self.fakerepo, local_repo) files = ( os.path.join(self.temp_d, "testfile"), os.path.join(self.temp_d, 
"testfile2"), ) local_repo["HEAD"] = remote_refs["refs/heads/master"] indexfile = local_repo.index_path() tree = local_repo["HEAD"].tree index.build_index_from_tree( local_repo.path, indexfile, local_repo.object_store, tree ) for f in files: self.assertEqual(os.path.isfile(f), True) def determine_wants(*args): return {"refs/heads/master": local_repo.refs["HEAD"]} os.mkdir(os.path.join(self.temp_d, "test")) files = ("testfile11", "testfile22", "test/testfile33") i = 0 for f in files: open(os.path.join(self.temp_d, f), "w").write("DATA %s" % i) i += 1 local_repo.stage(files) local_repo.do_commit("Test commit", "fbo@localhost", ref="refs/heads/master") tcp_client.send_pack( "/fakerepo", determine_wants, local_repo.generate_pack_data ) def test_push_remove_branch(self): def determine_wants(*args): return { "refs/heads/pullr-108": objects.ZERO_SHA, "refs/heads/master": local_repo.refs["refs/heads/master"], "refs/heads/mybranch": local_repo.refs["refs/heads/mybranch"], } self.test_push_multiple_branch() local_repo = repo.Repo(self.temp_d) tcp_client = client.TCPGitClient(self.server_address, port=self.port) tcp_client.send_pack( self.fakerepo, determine_wants, local_repo.generate_pack_data ) swift_repo = swift.SwiftRepo("fakerepo", self.conf) self.assertNotIn("refs/heads/pullr-108", swift_repo.refs.allkeys()) def test_push_annotated_tag(self): def determine_wants(*args): return { "refs/heads/master": local_repo.refs["HEAD"], "refs/tags/v1.0": local_repo.refs["refs/tags/v1.0"], } local_repo = repo.Repo.init(self.temp_d, mkdir=True) # Nothing in the staging area sha = local_repo.do_commit("Test commit", "fbo@localhost") otype, data = local_repo.object_store.get_raw(sha) commit = objects.ShaFile.from_raw_string(otype, data) tag = objects.Tag() tag.tagger = "fbo@localhost" tag.message = "Annotated tag" tag.tag_timezone = objects.parse_timezone("-0200")[0] tag.tag_time = commit.author_time tag.object = (objects.Commit, commit.id) tag.name = "v0.1" local_repo.object_store.add_object(tag) local_repo.refs["refs/tags/v1.0"] = tag.id swift.SwiftRepo.init_bare(self.scon, self.conf) tcp_client = client.TCPGitClient(self.server_address, port=self.port) tcp_client.send_pack( self.fakerepo, determine_wants, local_repo.generate_pack_data ) swift_repo = swift.SwiftRepo(self.fakerepo, self.conf) tag_sha = swift_repo.refs.read_loose_ref("refs/tags/v1.0") otype, data = swift_repo.object_store.get_raw(tag_sha) rtag = objects.ShaFile.from_raw_string(otype, data) self.assertEqual(rtag.object[1], commit.id) self.assertEqual(rtag.id, tag.id) if __name__ == "__main__": unittest.main() diff --git a/dulwich/errors.py b/dulwich/errors.py index 666bab3e..54022b05 100644 --- a/dulwich/errors.py +++ b/dulwich/errors.py @@ -1,198 +1,199 @@ # errors.py -- errors for dulwich # Copyright (C) 2007 James Westby # Copyright (C) 2009-2012 Jelmer Vernooij # # Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU # General Public License as public by the Free Software Foundation; version 2.0 # or (at your option) any later version. You can redistribute it and/or # modify it under the terms of either of these two licenses. # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
# # You should have received a copy of the licenses; if not, see # for a copy of the GNU General Public License # and for a copy of the Apache # License, Version 2.0. # """Dulwich-related exception classes and utility functions.""" import binascii class ChecksumMismatch(Exception): """A checksum didn't match the expected contents.""" def __init__(self, expected, got, extra=None): if len(expected) == 20: expected = binascii.hexlify(expected) if len(got) == 20: got = binascii.hexlify(got) self.expected = expected self.got = got self.extra = extra if self.extra is None: Exception.__init__( - self, "Checksum mismatch: Expected %s, got %s" % (expected, got) + self, + "Checksum mismatch: Expected %s, got %s" % (expected, got), ) else: Exception.__init__( self, "Checksum mismatch: Expected %s, got %s; %s" % (expected, got, extra), ) class WrongObjectException(Exception): """Base class for all the _ is not a _ exceptions on objects. Do not instantiate directly. Subclasses should define a type_name attribute that indicates what was expected if they were raised. """ def __init__(self, sha, *args, **kwargs): Exception.__init__(self, "%s is not a %s" % (sha, self.type_name)) class NotCommitError(WrongObjectException): """Indicates that the sha requested does not point to a commit.""" type_name = "commit" class NotTreeError(WrongObjectException): """Indicates that the sha requested does not point to a tree.""" type_name = "tree" class NotTagError(WrongObjectException): """Indicates that the sha requested does not point to a tag.""" type_name = "tag" class NotBlobError(WrongObjectException): """Indicates that the sha requested does not point to a blob.""" type_name = "blob" class MissingCommitError(Exception): """Indicates that a commit was not found in the repository.""" def __init__(self, sha, *args, **kwargs): self.sha = sha Exception.__init__(self, "%s is not in the revision store" % sha) class ObjectMissing(Exception): """Indicates that a requested object is missing.""" def __init__(self, sha, *args, **kwargs): Exception.__init__(self, "%s is not in the pack" % sha) class ApplyDeltaError(Exception): """Indicates that applying a delta failed.""" def __init__(self, *args, **kwargs): Exception.__init__(self, *args, **kwargs) class NotGitRepository(Exception): """Indicates that no Git repository was found.""" def __init__(self, *args, **kwargs): Exception.__init__(self, *args, **kwargs) class GitProtocolError(Exception): """Git protocol exception.""" def __init__(self, *args, **kwargs): Exception.__init__(self, *args, **kwargs) def __eq__(self, other): return isinstance(self, type(other)) and self.args == other.args class SendPackError(GitProtocolError): """An error occurred during send_pack.""" # N.B.: UpdateRefsError is no longer used and will be removed in # Dulwich 0.21. # remove: >= 0.21 class UpdateRefsError(GitProtocolError): """The server reported errors updating refs.""" def __init__(self, *args, **kwargs): self.ref_status = kwargs.pop("ref_status") super(UpdateRefsError, self).__init__(*args, **kwargs) class HangupException(GitProtocolError): """Hangup exception.""" def __init__(self, stderr_lines=None): if stderr_lines: super(HangupException, self).__init__( "\n".join( [line.decode("utf-8", "surrogateescape") for line in stderr_lines] ) ) else: super(HangupException, self).__init__( "The remote server unexpectedly closed the connection."
) self.stderr_lines = stderr_lines def __eq__(self, other): return isinstance(self, type(other)) and self.stderr_lines == other.stderr_lines class UnexpectedCommandError(GitProtocolError): """Unexpected command received in a proto line.""" def __init__(self, command): if command is None: command = "flush-pkt" else: command = "command %s" % command super(UnexpectedCommandError, self).__init__( "Protocol got unexpected %s" % command ) class FileFormatException(Exception): """Base class for exceptions relating to reading git file formats.""" class PackedRefsException(FileFormatException): """Indicates an error parsing a packed-refs file.""" class ObjectFormatException(FileFormatException): """Indicates an error parsing an object.""" class NoIndexPresent(Exception): """No index is present.""" class CommitError(Exception): """An error occurred while performing a commit.""" class RefFormatError(Exception): """Indicates an invalid ref name.""" class HookError(Exception): """An error occurred while executing a hook.""" diff --git a/dulwich/fastexport.py b/dulwich/fastexport.py index ff7c3785..f34e0dd3 100644 --- a/dulwich/fastexport.py +++ b/dulwich/fastexport.py @@ -1,251 +1,258 @@ # __init__.py -- Fast export/import functionality # Copyright (C) 2010-2013 Jelmer Vernooij # # Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU # General Public License as public by the Free Software Foundation; version 2.0 # or (at your option) any later version. You can redistribute it and/or # modify it under the terms of either of these two licenses. # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # You should have received a copy of the licenses; if not, see # for a copy of the GNU General Public License # and for a copy of the Apache # License, Version 2.0. 
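The exceptions above are deliberately thin wrappers, so callers can branch on failure modes directly; GitProtocolError additionally defines __eq__, which keeps protocol failures comparable in tests. A short illustration of matching on them from calling code; the porcelain.fetch call is just one representative entry point, not the only place these are raised:

from dulwich import porcelain
from dulwich.errors import ChecksumMismatch, HangupException, NotGitRepository

def fetch_with_diagnostics(repo_path, remote_url):
    try:
        porcelain.fetch(repo_path, remote_url)
    except NotGitRepository:
        print("%s is not a git repository" % repo_path)
    except HangupException as e:
        # Carries whatever the remote wrote to stderr before disconnecting.
        print("remote hung up: %s" % e)
    except ChecksumMismatch as e:
        print("corrupt object: expected %s, got %s" % (e.expected, e.got))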
# """Fast export/import functionality.""" from dulwich.index import ( commit_tree, ) from dulwich.objects import ( Blob, Commit, Tag, ZERO_SHA, ) from fastimport import ( # noqa: E402 commands, errors as fastimport_errors, parser, processor, ) import stat # noqa: E402 def split_email(text): (name, email) = text.rsplit(b" <", 1) return (name, email.rstrip(b">")) class GitFastExporter(object): """Generate a fast-export output stream for Git objects.""" def __init__(self, outf, store): self.outf = outf self.store = store self.markers = {} self._marker_idx = 0 def print_cmd(self, cmd): self.outf.write(getattr(cmd, "__bytes__", cmd.__repr__)() + b"\n") def _allocate_marker(self): self._marker_idx += 1 return ("%d" % (self._marker_idx,)).encode("ascii") def _export_blob(self, blob): marker = self._allocate_marker() self.markers[marker] = blob.id return (commands.BlobCommand(marker, blob.data), marker) def emit_blob(self, blob): (cmd, marker) = self._export_blob(blob) self.print_cmd(cmd) return marker def _iter_files(self, base_tree, new_tree): for ( (old_path, new_path), (old_mode, new_mode), (old_hexsha, new_hexsha), ) in self.store.tree_changes(base_tree, new_tree): if new_path is None: yield commands.FileDeleteCommand(old_path) continue if not stat.S_ISDIR(new_mode): blob = self.store[new_hexsha] marker = self.emit_blob(blob) if old_path != new_path and old_path is not None: yield commands.FileRenameCommand(old_path, new_path) if old_mode != new_mode or old_hexsha != new_hexsha: prefixed_marker = b":" + marker yield commands.FileModifyCommand( new_path, new_mode, prefixed_marker, None ) def _export_commit(self, commit, ref, base_tree=None): file_cmds = list(self._iter_files(base_tree, commit.tree)) marker = self._allocate_marker() if commit.parents: from_ = commit.parents[0] merges = commit.parents[1:] else: from_ = None merges = [] author, author_email = split_email(commit.author) committer, committer_email = split_email(commit.committer) cmd = commands.CommitCommand( ref, marker, (author, author_email, commit.author_time, commit.author_timezone), - (committer, committer_email, commit.commit_time, commit.commit_timezone), + ( + committer, + committer_email, + commit.commit_time, + commit.commit_timezone, + ), commit.message, from_, merges, file_cmds, ) return (cmd, marker) def emit_commit(self, commit, ref, base_tree=None): cmd, marker = self._export_commit(commit, ref, base_tree) self.print_cmd(cmd) return marker class GitImportProcessor(processor.ImportProcessor): """An import processor that imports into a Git repository using Dulwich.""" # FIXME: Batch creation of objects? 
def __init__(self, repo, params=None, verbose=False, outf=None): processor.ImportProcessor.__init__(self, params, verbose) self.repo = repo self.last_commit = ZERO_SHA self.markers = {} self._contents = {} def lookup_object(self, objectish): if objectish.startswith(b":"): return self.markers[objectish[1:]] return objectish def import_stream(self, stream): p = parser.ImportParser(stream) self.process(p.iter_commands) return self.markers def blob_handler(self, cmd): """Process a BlobCommand.""" blob = Blob.from_string(cmd.data) self.repo.object_store.add_object(blob) if cmd.mark: self.markers[cmd.mark] = blob.id def checkpoint_handler(self, cmd): """Process a CheckpointCommand.""" pass def commit_handler(self, cmd): """Process a CommitCommand.""" commit = Commit() if cmd.author is not None: author = cmd.author else: author = cmd.committer (author_name, author_email, author_timestamp, author_timezone) = author ( committer_name, committer_email, commit_timestamp, commit_timezone, ) = cmd.committer commit.author = author_name + b" <" + author_email + b">" commit.author_timezone = author_timezone commit.author_time = int(author_timestamp) commit.committer = committer_name + b" <" + committer_email + b">" commit.commit_timezone = commit_timezone commit.commit_time = int(commit_timestamp) commit.message = cmd.message commit.parents = [] if cmd.from_: cmd.from_ = self.lookup_object(cmd.from_) self._reset_base(cmd.from_) for filecmd in cmd.iter_files(): if filecmd.name == b"filemodify": if filecmd.data is not None: blob = Blob.from_string(filecmd.data) self.repo.object_store.add(blob) blob_id = blob.id else: blob_id = self.lookup_object(filecmd.dataref) self._contents[filecmd.path] = (filecmd.mode, blob_id) elif filecmd.name == b"filedelete": del self._contents[filecmd.path] elif filecmd.name == b"filecopy": self._contents[filecmd.dest_path] = self._contents[filecmd.src_path] elif filecmd.name == b"filerename": self._contents[filecmd.new_path] = self._contents[filecmd.old_path] del self._contents[filecmd.old_path] elif filecmd.name == b"filedeleteall": self._contents = {} else: raise Exception("Command %s not supported" % filecmd.name) commit.tree = commit_tree( self.repo.object_store, ((path, hexsha, mode) for (path, (mode, hexsha)) in self._contents.items()), ) if self.last_commit != ZERO_SHA: commit.parents.append(self.last_commit) for merge in cmd.merges: commit.parents.append(self.lookup_object(merge)) self.repo.object_store.add_object(commit) self.repo[cmd.ref] = commit.id self.last_commit = commit.id if cmd.mark: self.markers[cmd.mark] = commit.id def progress_handler(self, cmd): """Process a ProgressCommand.""" pass def _reset_base(self, commit_id): if self.last_commit == commit_id: return self._contents = {} self.last_commit = commit_id if commit_id != ZERO_SHA: tree_id = self.repo[commit_id].tree - for (path, mode, hexsha) in self.repo.object_store.iter_tree_contents( - tree_id - ): + for ( + path, + mode, + hexsha, + ) in self.repo.object_store.iter_tree_contents(tree_id): self._contents[path] = (mode, hexsha) def reset_handler(self, cmd): """Process a ResetCommand.""" if cmd.from_ is None: from_ = ZERO_SHA else: from_ = self.lookup_object(cmd.from_) self._reset_base(from_) self.repo.refs[cmd.ref] = from_ def tag_handler(self, cmd): """Process a TagCommand.""" tag = Tag() tag.tagger = cmd.tagger tag.message = cmd.message tag.name = cmd.tag self.repo.add_object(tag) self.repo.refs["refs/tags/" + tag.name] = tag.id def feature_handler(self, cmd): """Process a FeatureCommand.""" raise 
fastimport_errors.UnknownFeature(cmd.feature_name) diff --git a/dulwich/file.py b/dulwich/file.py index 922c4311..6abdc27d 100644 --- a/dulwich/file.py +++ b/dulwich/file.py @@ -1,204 +1,212 @@ # file.py -- Safe access to git files # Copyright (C) 2010 Google, Inc. # # Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU # General Public License as public by the Free Software Foundation; version 2.0 # or (at your option) any later version. You can redistribute it and/or # modify it under the terms of either of these two licenses. # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # You should have received a copy of the licenses; if not, see # for a copy of the GNU General Public License # and for a copy of the Apache # License, Version 2.0. # """Safe access to git files.""" import io import os import sys def ensure_dir_exists(dirname): """Ensure a directory exists, creating if necessary.""" try: os.makedirs(dirname) except FileExistsError: pass def _fancy_rename(oldname, newname): """Rename file with temporary backup file to rollback if rename fails""" if not os.path.exists(newname): try: os.rename(oldname, newname) except OSError: raise return # Defer the tempfile import since it pulls in a lot of other things. import tempfile # destination file exists try: (fd, tmpfile) = tempfile.mkstemp(".tmp", prefix=oldname, dir=".") os.close(fd) os.remove(tmpfile) except OSError: # either file could not be created (e.g. permission problem) # or could not be deleted (e.g. rude virus scanner) raise try: os.rename(newname, tmpfile) except OSError: raise # no rename occurred try: os.rename(oldname, newname) except OSError: os.rename(tmpfile, newname) raise os.remove(tmpfile) def GitFile(filename, mode="rb", bufsize=-1): """Create a file object that obeys the git file locking protocol. Returns: a builtin file object or a _GitFile object Note: See _GitFile for a description of the file locking protocol. Only read-only and write-only (binary) modes are supported; r+, w+, and a are not. To read and write from the same file, you can take advantage of the fact that opening a file for write does not actually open the file you request. """ if "a" in mode: raise IOError("append mode not supported for Git files") if "+" in mode: raise IOError("read/write mode not supported for Git files") if "b" not in mode: raise IOError("text mode not supported for Git files") if "w" in mode: return _GitFile(filename, mode, bufsize) else: return io.open(filename, mode, bufsize) class FileLocked(Exception): """File is already locked.""" def __init__(self, filename, lockfilename): self.filename = filename self.lockfilename = lockfilename super(FileLocked, self).__init__(filename, lockfilename) class _GitFile(object): """File that follows the git locking protocol for writes. All writes to a file foo will be written into foo.lock in the same directory, and the lockfile will be renamed to overwrite the original file on close. Note: You *must* call close() or abort() on a _GitFile for the lock to be released. Typically this will happen in a finally block. 
""" PROXY_PROPERTIES = set( - ["closed", "encoding", "errors", "mode", "name", "newlines", "softspace"] + [ + "closed", + "encoding", + "errors", + "mode", + "name", + "newlines", + "softspace", + ] ) PROXY_METHODS = ( "__iter__", "flush", "fileno", "isatty", "read", "readline", "readlines", "seek", "tell", "truncate", "write", "writelines", ) def __init__(self, filename, mode, bufsize): self._filename = filename if isinstance(self._filename, bytes): self._lockfilename = self._filename + b".lock" else: self._lockfilename = self._filename + ".lock" try: fd = os.open( self._lockfilename, os.O_RDWR | os.O_CREAT | os.O_EXCL | getattr(os, "O_BINARY", 0), ) except FileExistsError: raise FileLocked(filename, self._lockfilename) self._file = os.fdopen(fd, mode, bufsize) self._closed = False for method in self.PROXY_METHODS: setattr(self, method, getattr(self._file, method)) def abort(self): """Close and discard the lockfile without overwriting the target. If the file is already closed, this is a no-op. """ if self._closed: return self._file.close() try: os.remove(self._lockfilename) self._closed = True except FileNotFoundError: # The file may have been removed already, which is ok. self._closed = True def close(self): """Close this file, saving the lockfile over the original. Note: If this method fails, it will attempt to delete the lockfile. However, it is not guaranteed to do so (e.g. if a filesystem becomes suddenly read-only), which will prevent future writes to this file until the lockfile is removed manually. Raises: OSError: if the original file could not be overwritten. The lock file is still closed, so further attempts to write to the same file object will raise ValueError. """ if self._closed: return os.fsync(self._file.fileno()) self._file.close() try: if getattr(os, "replace", None) is not None: os.replace(self._lockfilename, self._filename) else: if sys.platform != "win32": os.rename(self._lockfilename, self._filename) else: # Windows versions prior to Vista don't support atomic # renames _fancy_rename(self._lockfilename, self._filename) finally: self.abort() def __enter__(self): return self def __exit__(self, exc_type, exc_val, exc_tb): self.close() def __getattr__(self, name): """Proxy property calls to the underlying file.""" if name in self.PROXY_PROPERTIES: return getattr(self._file, name) raise AttributeError(name) diff --git a/dulwich/ignore.py b/dulwich/ignore.py index ec76b377..23be486e 100644 --- a/dulwich/ignore.py +++ b/dulwich/ignore.py @@ -1,387 +1,394 @@ # Copyright (C) 2017 Jelmer Vernooij # # Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU # General Public License as public by the Free Software Foundation; version 2.0 # or (at your option) any later version. You can redistribute it and/or # modify it under the terms of either of these two licenses. # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # You should have received a copy of the licenses; if not, see # for a copy of the GNU General Public License # and for a copy of the Apache # License, Version 2.0. # """Parsing of gitignore files. 
For details for the matching rules, see https://git-scm.com/docs/gitignore """ import os.path import re from typing import ( BinaryIO, Iterable, List, Optional, TYPE_CHECKING, Dict, Union, ) if TYPE_CHECKING: from dulwich.repo import Repo from dulwich.config import get_xdg_config_home_path, Config def _translate_segment(segment: bytes) -> bytes: if segment == b"*": return b"[^/]+" res = b"" i, n = 0, len(segment) while i < n: c = segment[i : i + 1] i = i + 1 if c == b"*": res += b"[^/]*" elif c == b"?": res += b"[^/]" elif c == b"[": j = i if j < n and segment[j : j + 1] == b"!": j = j + 1 if j < n and segment[j : j + 1] == b"]": j = j + 1 while j < n and segment[j : j + 1] != b"]": j = j + 1 if j >= n: res += b"\\[" else: stuff = segment[i:j].replace(b"\\", b"\\\\") i = j + 1 if stuff.startswith(b"!"): stuff = b"^" + stuff[1:] elif stuff.startswith(b"^"): stuff = b"\\" + stuff res += b"[" + stuff + b"]" else: res += re.escape(c) return res def translate(pat: bytes) -> bytes: """Translate a shell PATTERN to a regular expression. There is no way to quote meta-characters. Originally copied from fnmatch in Python 2.7, but modified for Dulwich to cope with features in Git ignore patterns. """ res = b"(?ms)" if b"/" not in pat[:-1]: # If there's no slash, this is a filename-based match res += b"(.*/)?" if pat.startswith(b"**/"): # Leading **/ pat = pat[2:] res += b"(.*/)?" if pat.startswith(b"/"): pat = pat[1:] for i, segment in enumerate(pat.split(b"/")): if segment == b"**": res += b"(/.*)?" continue else: res += (re.escape(b"/") if i > 0 else b"") + _translate_segment(segment) if not pat.endswith(b"/"): res += b"/?" return res + b"\\Z" def read_ignore_patterns(f: BinaryIO) -> Iterable[bytes]: """Read a git ignore file. Args: f: File-like object to read from Returns: List of patterns """ for line in f: line = line.rstrip(b"\r\n") # Ignore blank lines, they're used for readability. if not line: continue if line.startswith(b"#"): # Comment continue # Trailing spaces are ignored unless they are quoted with a backslash. while line.endswith(b" ") and not line.endswith(b"\\ "): line = line[:-1] line = line.replace(b"\\ ", b" ") yield line def match_pattern(path: bytes, pattern: bytes, ignorecase: bool = False) -> bool: """Match a gitignore-style pattern against a path. Args: path: Path to match pattern: Pattern to match ignorecase: Whether to do case-sensitive matching Returns: bool indicating whether the pattern matched """ return Pattern(pattern, ignorecase).match(path) class Pattern(object): """A single ignore pattern.""" def __init__(self, pattern: bytes, ignorecase: bool = False): self.pattern = pattern self.ignorecase = ignorecase if pattern[0:1] == b"!": self.is_exclude = False pattern = pattern[1:] else: if pattern[0:1] == b"\\": pattern = pattern[1:] self.is_exclude = True flags = 0 if self.ignorecase: flags = re.IGNORECASE self._re = re.compile(translate(pattern), flags) def __bytes__(self) -> bytes: return self.pattern def __str__(self) -> str: return os.fsdecode(self.pattern) def __eq__(self, other: object) -> bool: return ( isinstance(other, type(self)) and self.pattern == other.pattern and self.ignorecase == other.ignorecase ) def __repr__(self) -> str: - return "%s(%r, %r)" % (type(self).__name__, self.pattern, self.ignorecase) + return "%s(%r, %r)" % ( + type(self).__name__, + self.pattern, + self.ignorecase, + ) def match(self, path: bytes) -> bool: """Try to match a path against this ignore pattern. 
Args: path: Path to match (relative to ignore location) Returns: boolean """ return bool(self._re.match(path)) class IgnoreFilter(object): def __init__(self, patterns: Iterable[bytes], ignorecase: bool = False, path=None): self._patterns = [] # type: List[Pattern] self._ignorecase = ignorecase self._path = path for pattern in patterns: self.append_pattern(pattern) def append_pattern(self, pattern: bytes) -> None: """Add a pattern to the set.""" self._patterns.append(Pattern(pattern, self._ignorecase)) def find_matching(self, path: Union[bytes, str]) -> Iterable[Pattern]: """Yield all matching patterns for path. Args: path: Path to match Returns: Iterator over iterators """ if not isinstance(path, bytes): path = os.fsencode(path) for pattern in self._patterns: if pattern.match(path): yield pattern def is_ignored(self, path: bytes) -> Optional[bool]: """Check whether a path is ignored. For directories, include a trailing slash. Returns: status is None if file is not mentioned, True if it is included, False if it is explicitly excluded. """ status = None for pattern in self.find_matching(path): status = pattern.is_exclude return status @classmethod def from_path(cls, path, ignorecase: bool = False) -> "IgnoreFilter": with open(path, "rb") as f: return cls(read_ignore_patterns(f), ignorecase, path=path) def __repr__(self) -> str: path = getattr(self, "_path", None) if path is not None: return "%s.from_path(%r)" % (type(self).__name__, path) else: return "<%s>" % (type(self).__name__) class IgnoreFilterStack(object): """Check for ignore status in multiple filters.""" def __init__(self, filters): self._filters = filters def is_ignored(self, path: str) -> Optional[bool]: """Check whether a path is explicitly included or excluded in ignores. Args: path: Path to check Returns: None if the file is not mentioned, True if it is included, False if it is explicitly excluded. """ status = None for filter in self._filters: status = filter.is_ignored(path) if status is not None: return status return status def default_user_ignore_filter_path(config: Config) -> str: """Return default user ignore filter path. Args: config: A Config object Returns: Path to a global ignore file """ try: return config.get((b"core",), b"excludesFile") except KeyError: pass return get_xdg_config_home_path("git", "ignore") class IgnoreFilterManager(object): """Ignore file manager.""" def __init__( - self, top_path: str, global_filters: List[IgnoreFilter], ignorecase: bool + self, + top_path: str, + global_filters: List[IgnoreFilter], + ignorecase: bool, ): self._path_filters = {} # type: Dict[str, Optional[IgnoreFilter]] self._top_path = top_path self._global_filters = global_filters self._ignorecase = ignorecase def __repr__(self) -> str: return "%s(%s, %r, %r)" % ( type(self).__name__, self._top_path, self._global_filters, self._ignorecase, ) def _load_path(self, path: str) -> Optional[IgnoreFilter]: try: return self._path_filters[path] except KeyError: pass p = os.path.join(self._top_path, path, ".gitignore") try: self._path_filters[path] = IgnoreFilter.from_path(p, self._ignorecase) except IOError: self._path_filters[path] = None return self._path_filters[path] def find_matching(self, path: str) -> Iterable[Pattern]: """Find matching patterns for path. Stops after the first ignore file with matches. 
Args: path: Path to check Returns: Iterator over Pattern instances """ if os.path.isabs(path): raise ValueError("%s is an absolute path" % path) filters = [(0, f) for f in self._global_filters] if os.path.sep != "/": path = path.replace(os.path.sep, "/") parts = path.split("/") for i in range(len(parts) + 1): dirname = "/".join(parts[:i]) for s, f in filters: relpath = "/".join(parts[s:i]) if i < len(parts): # Paths leading up to the final part are all directories, # so need a trailing slash. relpath += "/" matches = list(f.find_matching(relpath)) if matches: return iter(matches) ignore_filter = self._load_path(dirname) if ignore_filter is not None: filters.insert(0, (i, ignore_filter)) return iter([]) def is_ignored(self, path: str) -> Optional[bool]: """Check whether a path is explicitly included or excluded in ignores. Args: path: Path to check Returns: None if the file is not mentioned, True if it is included, False if it is explicitly excluded. """ matches = list(self.find_matching(path)) if matches: return matches[-1].is_exclude return None @classmethod def from_repo(cls, repo: "Repo") -> "IgnoreFilterManager": """Create a IgnoreFilterManager from a repository. Args: repo: Repository object Returns: A `IgnoreFilterManager` object """ global_filters = [] for p in [ os.path.join(repo.controldir(), "info", "exclude"), default_user_ignore_filter_path(repo.get_config_stack()), ]: try: global_filters.append(IgnoreFilter.from_path(os.path.expanduser(p))) except IOError: pass config = repo.get_config_stack() ignorecase = config.get_boolean((b"core"), (b"ignorecase"), False) return cls(repo.path, global_filters, ignorecase) diff --git a/dulwich/index.py b/dulwich/index.py index bdc6c466..920e11ff 100644 --- a/dulwich/index.py +++ b/dulwich/index.py @@ -1,940 +1,953 @@ # index.py -- File parser/writer for the git index file # Copyright (C) 2008-2013 Jelmer Vernooij # # Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU # General Public License as public by the Free Software Foundation; version 2.0 # or (at your option) any later version. You can redistribute it and/or # modify it under the terms of either of these two licenses. # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # You should have received a copy of the licenses; if not, see # for a copy of the GNU General Public License # and for a copy of the Apache # License, Version 2.0. 
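IgnoreFilterManager above lazily layers every applicable source of ignore rules (per-directory .gitignore files, the repository's info/exclude, and the user's global excludes file) and caches per-directory filters as paths are queried. Typical use is one manager per repository, as in this small sketch:

from dulwich.ignore import IgnoreFilterManager
from dulwich.repo import Repo

def filter_ignored(repo_path, candidates):
    # Paths must be relative to the working tree; directories need a
    # trailing slash to match directory-only patterns.
    manager = IgnoreFilterManager.from_repo(Repo(repo_path))
    for path in candidates:
        if manager.is_ignored(path):
            yield path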
# """Parser for the git index file format.""" import collections import os import stat import struct import sys from typing import ( Any, BinaryIO, Callable, Dict, List, Optional, TYPE_CHECKING, Iterable, Iterator, Tuple, ) if TYPE_CHECKING: from dulwich.object_store import BaseObjectStore from dulwich.file import GitFile from dulwich.objects import ( Blob, S_IFGITLINK, S_ISGITLINK, Tree, hex_to_sha, sha_to_hex, ) from dulwich.pack import ( SHA1Reader, SHA1Writer, ) IndexEntry = collections.namedtuple( "IndexEntry", - ["ctime", "mtime", "dev", "ino", "mode", "uid", "gid", "size", "sha", "flags"], + [ + "ctime", + "mtime", + "dev", + "ino", + "mode", + "uid", + "gid", + "size", + "sha", + "flags", + ], ) FLAG_STAGEMASK = 0x3000 FLAG_VALID = 0x8000 FLAG_EXTENDED = 0x4000 DEFAULT_VERSION = 2 def pathsplit(path): """Split a /-delimited path into a directory part and a basename. Args: path: The path to split. Returns: Tuple with directory name and basename """ try: (dirname, basename) = path.rsplit(b"/", 1) except ValueError: return (b"", path) else: return (dirname, basename) def pathjoin(*args): """Join a /-delimited path.""" return b"/".join([p for p in args if p]) def read_cache_time(f): """Read a cache time. Args: f: File-like object to read from Returns: Tuple with seconds and nanoseconds """ return struct.unpack(">LL", f.read(8)) def write_cache_time(f, t): """Write a cache time. Args: f: File-like object to write to t: Time to write (as int, float or tuple with secs and nsecs) """ if isinstance(t, int): t = (t, 0) elif isinstance(t, float): (secs, nsecs) = divmod(t, 1.0) t = (int(secs), int(nsecs * 1000000000)) elif not isinstance(t, tuple): raise TypeError(t) f.write(struct.pack(">LL", *t)) def read_cache_entry(f): """Read an entry from a cache file. Args: f: File-like object to read from Returns: tuple with: device, inode, mode, uid, gid, size, sha, flags """ beginoffset = f.tell() ctime = read_cache_time(f) mtime = read_cache_time(f) ( dev, ino, mode, uid, gid, size, sha, flags, ) = struct.unpack(">LLLLLL20sH", f.read(20 + 4 * 6 + 2)) name = f.read((flags & 0x0FFF)) # Padding: real_size = (f.tell() - beginoffset + 8) & ~7 f.read((beginoffset + real_size) - f.tell()) return ( name, ctime, mtime, dev, ino, mode, uid, gid, size, sha_to_hex(sha), flags & ~0x0FFF, ) def write_cache_entry(f, entry): """Write an index entry to a file. Args: f: File object entry: Entry to write, tuple with: (name, ctime, mtime, dev, ino, mode, uid, gid, size, sha, flags) """ beginoffset = f.tell() (name, ctime, mtime, dev, ino, mode, uid, gid, size, sha, flags) = entry write_cache_time(f, ctime) write_cache_time(f, mtime) flags = len(name) | (flags & ~0x0FFF) f.write( struct.pack( b">LLLLLL20sH", dev & 0xFFFFFFFF, ino & 0xFFFFFFFF, mode, uid, gid, size, hex_to_sha(sha), flags, ) ) f.write(name) real_size = (f.tell() - beginoffset + 8) & ~7 f.write(b"\0" * ((beginoffset + real_size) - f.tell())) def read_index(f: BinaryIO): """Read an index file, yielding the individual entries.""" header = f.read(4) if header != b"DIRC": raise AssertionError("Invalid index file header: %r" % header) (version, num_entries) = struct.unpack(b">LL", f.read(4 * 2)) assert version in (1, 2) for i in range(num_entries): yield read_cache_entry(f) def read_index_dict(f): """Read an index file and return it as a dictionary. 
Args: f: File object to read from """ ret = {} for x in read_index(f): ret[x[0]] = IndexEntry(*x[1:]) return ret def write_index(f: BinaryIO, entries: List[Any], version: Optional[int] = None): """Write an index file. Args: f: File-like object to write to version: Version number to write entries: Iterable over the entries to write """ if version is None: version = DEFAULT_VERSION f.write(b"DIRC") f.write(struct.pack(b">LL", version, len(entries))) for x in entries: write_cache_entry(f, x) def write_index_dict( - f: BinaryIO, entries: Dict[bytes, IndexEntry], version: Optional[int] = None + f: BinaryIO, + entries: Dict[bytes, IndexEntry], + version: Optional[int] = None, ) -> None: """Write an index file based on the contents of a dictionary.""" entries_list = [] for name in sorted(entries): entries_list.append((name,) + tuple(entries[name])) write_index(f, entries_list, version=version) def cleanup_mode(mode: int) -> int: """Cleanup a mode value. This will return a mode that can be stored in a tree object. Args: mode: Mode to clean up. Returns: mode """ if stat.S_ISLNK(mode): return stat.S_IFLNK elif stat.S_ISDIR(mode): return stat.S_IFDIR elif S_ISGITLINK(mode): return S_IFGITLINK ret = stat.S_IFREG | 0o644 if mode & 0o100: ret |= 0o111 return ret class Index(object): """A Git Index file.""" def __init__(self, filename): """Open an index file. Args: filename: Path to the index file """ self._filename = filename # TODO(jelmer): Store the version returned by read_index self._version = None self.clear() self.read() @property def path(self): return self._filename def __repr__(self): return "%s(%r)" % (self.__class__.__name__, self._filename) def write(self) -> None: """Write current contents of index to disk.""" f = GitFile(self._filename, "wb") try: f = SHA1Writer(f) write_index_dict(f, self._byname, version=self._version) finally: f.close() def read(self): """Read current contents of index from disk.""" if not os.path.exists(self._filename): return f = GitFile(self._filename, "rb") try: f = SHA1Reader(f) for x in read_index(f): self[x[0]] = IndexEntry(*x[1:]) # FIXME: Additional data? f.read(os.path.getsize(self._filename) - f.tell() - 20) f.check_sha() finally: f.close() def __len__(self) -> int: """Number of entries in this index file.""" return len(self._byname) def __getitem__(self, name: bytes) -> IndexEntry: """Retrieve entry by relative path. 
Returns: tuple with (ctime, mtime, dev, ino, mode, uid, gid, size, sha, flags) """ return self._byname[name] def __iter__(self) -> Iterator[bytes]: """Iterate over the paths in this index.""" return iter(self._byname) def get_sha1(self, path: bytes) -> bytes: """Return the (git object) SHA1 for the object at a path.""" return self[path].sha def get_mode(self, path: bytes) -> int: """Return the POSIX file mode for the object at a path.""" return self[path].mode def iterobjects(self) -> Iterable[Tuple[bytes, bytes, int]]: """Iterate over path, sha, mode tuples for use with commit_tree.""" for path in self: entry = self[path] yield path, entry.sha, cleanup_mode(entry.mode) def iterblobs(self): import warnings warnings.warn("Use iterobjects() instead.", PendingDeprecationWarning) return self.iterobjects() def clear(self): """Remove all contents from this index.""" self._byname = {} def __setitem__(self, name, x): assert isinstance(name, bytes) assert len(x) == 10 # Remove the old entry if any self._byname[name] = IndexEntry(*x) def __delitem__(self, name): assert isinstance(name, bytes) del self._byname[name] def iteritems(self): return self._byname.items() def items(self): return self._byname.items() def update(self, entries): for name, value in entries.items(): self[name] = value def changes_from_tree(self, object_store, tree, want_unchanged=False): """Find the differences between the contents of this index and a tree. Args: object_store: Object store to use for retrieving tree contents tree: SHA1 of the root tree want_unchanged: Whether unchanged files should be reported Returns: Iterator over tuples with (oldpath, newpath), (oldmode, newmode), (oldsha, newsha) """ def lookup_entry(path): entry = self[path] return entry.sha, cleanup_mode(entry.mode) for (name, mode, sha) in changes_from_tree( self._byname.keys(), lookup_entry, object_store, tree, want_unchanged=want_unchanged, ): yield (name, mode, sha) def commit(self, object_store): """Create a new tree from an index. Args: object_store: Object store to save the tree in Returns: Root tree SHA """ return commit_tree(object_store, self.iterobjects()) def commit_tree( object_store: "BaseObjectStore", blobs: Iterable[Tuple[bytes, bytes, int]] ) -> bytes: """Commit a new tree. Args: object_store: Object store to add trees to blobs: Iterable over blob path, sha, mode entries Returns: SHA1 of the created tree. """ trees = {b"": {}} # type: Dict[bytes, Any] def add_tree(path): if path in trees: return trees[path] dirname, basename = pathsplit(path) t = add_tree(dirname) assert isinstance(basename, bytes) newtree = {} t[basename] = newtree trees[path] = newtree return newtree for path, sha, mode in blobs: tree_path, basename = pathsplit(path) tree = add_tree(tree_path) tree[basename] = (mode, sha) def build_tree(path): tree = Tree() for basename, entry in trees[path].items(): if isinstance(entry, dict): mode = stat.S_IFDIR sha = build_tree(pathjoin(path, basename)) else: (mode, sha) = entry tree.add(basename, mode, sha) object_store.add_object(tree) return tree.id return build_tree(b"") def commit_index(object_store: "BaseObjectStore", index: Index) -> bytes: """Create a new tree from an index. Args: object_store: Object store to save the tree in index: Index file Note: This function is deprecated, use index.commit() instead. Returns: Root tree sha. 
""" return commit_tree(object_store, index.iterobjects()) def changes_from_tree( names: Iterable[bytes], lookup_entry: Callable[[bytes], Tuple[bytes, int]], object_store: "BaseObjectStore", tree: Optional[bytes], want_unchanged=False, ) -> Iterable[ Tuple[ Tuple[Optional[bytes], Optional[bytes]], Tuple[Optional[int], Optional[int]], Tuple[Optional[bytes], Optional[bytes]], ] ]: """Find the differences between the contents of a tree and a working copy. Args: names: Iterable of names in the working copy lookup_entry: Function to lookup an entry in the working copy object_store: Object store to use for retrieving tree contents tree: SHA1 of the root tree, or None for an empty tree want_unchanged: Whether unchanged files should be reported Returns: Iterator over tuples with (oldpath, newpath), (oldmode, newmode), (oldsha, newsha) """ # TODO(jelmer): Support a include_trees option other_names = set(names) if tree is not None: for (name, mode, sha) in object_store.iter_tree_contents(tree): try: (other_sha, other_mode) = lookup_entry(name) except KeyError: # Was removed yield ((name, None), (mode, None), (sha, None)) else: other_names.remove(name) if want_unchanged or other_sha != sha or other_mode != mode: yield ((name, name), (mode, other_mode), (sha, other_sha)) # Mention added files for name in other_names: try: (other_sha, other_mode) = lookup_entry(name) except KeyError: pass else: yield ((None, name), (None, other_mode), (None, other_sha)) def index_entry_from_stat( stat_val, hex_sha: bytes, flags: int, mode: Optional[int] = None ): """Create a new index entry from a stat value. Args: stat_val: POSIX stat_result instance hex_sha: Hex sha of the object flags: Index flags """ if mode is None: mode = cleanup_mode(stat_val.st_mode) return IndexEntry( stat_val.st_ctime, stat_val.st_mtime, stat_val.st_dev, stat_val.st_ino, mode, stat_val.st_uid, stat_val.st_gid, stat_val.st_size, hex_sha, flags, ) def build_file_from_blob( blob, mode, target_path, honor_filemode=True, tree_encoding="utf-8" ): """Build a file or symlink on disk based on a Git object. Args: obj: The git object mode: File mode target_path: Path to write to honor_filemode: An optional flag to honor core.filemode setting in config file, default is core.filemode=True, change executable bit Returns: stat object for the file """ try: oldstat = os.lstat(target_path) except FileNotFoundError: oldstat = None contents = blob.as_raw_string() if stat.S_ISLNK(mode): # FIXME: This will fail on Windows. What should we do instead? if oldstat: os.unlink(target_path) if sys.platform == "win32": # os.readlink on Python3 on Windows requires a unicode string. contents = contents.decode(tree_encoding) target_path = target_path.decode(tree_encoding) os.symlink(contents, target_path) else: if oldstat is not None and oldstat.st_size == len(contents): with open(target_path, "rb") as f: if f.read() == contents: return oldstat with open(target_path, "wb") as f: # Write out file f.write(contents) if honor_filemode: os.chmod(target_path, mode) return os.lstat(target_path) INVALID_DOTNAMES = (b".git", b".", b"..", b"") def validate_path_element_default(element): return element.lower() not in INVALID_DOTNAMES def validate_path_element_ntfs(element): stripped = element.rstrip(b". 
").lower() if stripped in INVALID_DOTNAMES: return False if stripped == b"git~1": return False return True def validate_path(path, element_validator=validate_path_element_default): """Default path validator that just checks for .git/.""" parts = path.split(b"/") for p in parts: if not element_validator(p): return False else: return True def build_index_from_tree( root_path, index_path, object_store, tree_id, honor_filemode=True, validate_path_element=validate_path_element_default, ): """Generate and materialize index from a tree Args: tree_id: Tree to materialize root_path: Target dir for materialized index files index_path: Target path for generated index object_store: Non-empty object store holding tree contents honor_filemode: An optional flag to honor core.filemode setting in config file, default is core.filemode=True, change executable bit validate_path_element: Function to validate path elements to check out; default just refuses .git and .. directories. Note: existing index is wiped and contents are not merged in a working dir. Suitable only for fresh clones. """ index = Index(index_path) if not isinstance(root_path, bytes): root_path = os.fsencode(root_path) for entry in object_store.iter_tree_contents(tree_id): if not validate_path(entry.path, validate_path_element): continue full_path = _tree_to_fs_path(root_path, entry.path) if not os.path.exists(os.path.dirname(full_path)): os.makedirs(os.path.dirname(full_path)) # TODO(jelmer): Merge new index into working tree if S_ISGITLINK(entry.mode): if not os.path.isdir(full_path): os.mkdir(full_path) st = os.lstat(full_path) # TODO(jelmer): record and return submodule paths else: obj = object_store[entry.sha] st = build_file_from_blob( obj, entry.mode, full_path, honor_filemode=honor_filemode ) # Add file to index if not honor_filemode or S_ISGITLINK(entry.mode): # we can not use tuple slicing to build a new tuple, # because on windows that will convert the times to # longs, which causes errors further along st_tuple = ( entry.mode, st.st_ino, st.st_dev, st.st_nlink, st.st_uid, st.st_gid, st.st_size, st.st_atime, st.st_mtime, st.st_ctime, ) st = st.__class__(st_tuple) index[entry.path] = index_entry_from_stat(st, entry.sha, 0) index.write() def blob_from_path_and_mode(fs_path, mode, tree_encoding="utf-8"): """Create a blob from a path and a stat object. Args: fs_path: Full file system path to file st: A stat object Returns: A `Blob` object """ assert isinstance(fs_path, bytes) blob = Blob() if stat.S_ISLNK(mode): if sys.platform == "win32": # os.readlink on Python3 on Windows requires a unicode string. fs_path = os.fsdecode(fs_path) blob.data = os.readlink(fs_path).encode(tree_encoding) else: blob.data = os.readlink(fs_path) else: with open(fs_path, "rb") as f: blob.data = f.read() return blob def blob_from_path_and_stat(fs_path, st, tree_encoding="utf-8"): """Create a blob from a path and a stat object. Args: fs_path: Full file system path to file st: A stat object Returns: A `Blob` object """ return blob_from_path_and_mode(fs_path, st.st_mode, tree_encoding) def read_submodule_head(path): """Read the head commit of a submodule. Args: path: path to the submodule Returns: HEAD sha, None if not a valid head/repository """ from dulwich.errors import NotGitRepository from dulwich.repo import Repo # Repo currently expects a "str", so decode if necessary. # TODO(jelmer): Perhaps move this into Repo() ? 
if not isinstance(path, str): path = os.fsdecode(path) try: repo = Repo(path) except NotGitRepository: return None try: return repo.head() except KeyError: return None def _has_directory_changed(tree_path, entry): """Check if a directory has changed after getting an error. When handling an error trying to create a blob from a path, call this function. It will check if the path is a directory. If it's a directory and a submodule, check the submodule head to see if it's has changed. If not, consider the file as changed as Git tracked a file and not a directory. Return true if the given path should be considered as changed and False otherwise or if the path is not a directory. """ # This is actually a directory if os.path.exists(os.path.join(tree_path, b".git")): # Submodule head = read_submodule_head(tree_path) if entry.sha != head: return True else: # The file was changed to a directory, so consider it removed. return True return False def get_unstaged_changes(index: Index, root_path, filter_blob_callback=None): """Walk through an index and check for differences against working tree. Args: index: index to check root_path: path in which to find files Returns: iterator over paths with unstaged changes """ # For each entry in the index check the sha1 & ensure not staged if not isinstance(root_path, bytes): root_path = os.fsencode(root_path) for tree_path, entry in index.iteritems(): full_path = _tree_to_fs_path(root_path, tree_path) try: st = os.lstat(full_path) if stat.S_ISDIR(st.st_mode): if _has_directory_changed(tree_path, entry): yield tree_path continue if not stat.S_ISREG(st.st_mode) and not stat.S_ISLNK(st.st_mode): continue blob = blob_from_path_and_stat(full_path, st) if filter_blob_callback is not None: blob = filter_blob_callback(blob, tree_path) except FileNotFoundError: # The file was removed, so we assume that counts as # different from whatever file used to exist. yield tree_path else: if blob.id != entry.sha: yield tree_path os_sep_bytes = os.sep.encode("ascii") def _tree_to_fs_path(root_path, tree_path: bytes): """Convert a git tree path to a file system path. Args: root_path: Root filesystem path tree_path: Git tree path as bytes Returns: File system path. """ assert isinstance(tree_path, bytes) if os_sep_bytes != b"/": sep_corrected_path = tree_path.replace(b"/", os_sep_bytes) else: sep_corrected_path = tree_path return os.path.join(root_path, sep_corrected_path) def _fs_to_tree_path(fs_path): """Convert a file system path to a git tree path. Args: fs_path: File system path. Returns: Git tree path as bytes """ if not isinstance(fs_path, bytes): fs_path_bytes = os.fsencode(fs_path) else: fs_path_bytes = fs_path if os_sep_bytes != b"/": tree_path = fs_path_bytes.replace(os_sep_bytes, b"/") else: tree_path = fs_path_bytes return tree_path def index_entry_from_path(path, object_store=None): """Create an index from a filesystem path. This returns an index value for files, symlinks and tree references. 
For directories and non-existent files it returns None. Args: path: Path to create an index entry for object_store: Optional object store to save new blobs in Returns: An index entry; None for directories """ assert isinstance(path, bytes) st = os.lstat(path) if stat.S_ISDIR(st.st_mode): if os.path.exists(os.path.join(path, b".git")): head = read_submodule_head(path) if head is None: return None return index_entry_from_stat(st, head, 0, mode=S_IFGITLINK) return None if stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode): blob = blob_from_path_and_stat(path, st) if object_store is not None: object_store.add_object(blob) return index_entry_from_stat(st, blob.id, 0) return None def iter_fresh_entries( paths, root_path, object_store: Optional["BaseObjectStore"] = None ): """Iterate over current versions of index entries on disk. Args: paths: Paths to iterate over root_path: Root path to access from object_store: Optional store to save new blobs in Returns: Iterator over path, index_entry """ for path in paths: p = _tree_to_fs_path(root_path, path) try: entry = index_entry_from_path(p, object_store=object_store) except (FileNotFoundError, IsADirectoryError): entry = None yield path, entry def iter_fresh_blobs(index, root_path): """Iterate over versions of blobs on disk referenced by index. Don't use this function; it removes missing entries from index. Args: index: Index file root_path: Root path to access from include_deleted: Include deleted entries with sha and mode set to None Returns: Iterator over path, sha, mode """ import warnings warnings.warn("Use iter_fresh_objects instead.", PendingDeprecationWarning) for entry in iter_fresh_objects(index, root_path, include_deleted=True): if entry[1] is None: del index[entry[0]] else: yield entry def iter_fresh_objects(paths, root_path, include_deleted=False, object_store=None): """Iterate over versions of objects on disk referenced by index. Args: root_path: Root path to access from include_deleted: Include deleted entries with sha and mode set to None object_store: Optional object store to report new items to Returns: Iterator over path, sha, mode """ for path, entry in iter_fresh_entries(paths, root_path, object_store=object_store): if entry is None: if include_deleted: yield path, None, None else: entry = IndexEntry(*entry) yield path, entry.sha, cleanup_mode(entry.mode) def refresh_index(index, root_path): """Refresh the contents of an index. This is the equivalent of running 'git commit -a'. Args: index: Index to update root_path: Root filesystem path """ for path, entry in iter_fresh_entries(index, root_path): if entry: index[path] = entry diff --git a/dulwich/object_store.py b/dulwich/object_store.py index bc752215..b5cd0e27 100644 --- a/dulwich/object_store.py +++ b/dulwich/object_store.py @@ -1,1464 +1,1473 @@ # object_store.py -- Object store for git objects # Copyright (C) 2008-2013 Jelmer Vernooij # and others # # Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU # General Public License as public by the Free Software Foundation; version 2.0 # or (at your option) any later version. You can redistribute it and/or # modify it under the terms of either of these two licenses. # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License.
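The index helpers above are designed to compose: blob_from_path_and_stat captures a working-tree file as a Blob, index_entry_from_stat records its stat metadata, and Index.commit folds the staged entries into tree objects. A compact sketch staging one file by hand in a normal (non-bare) checkout; the helper name is illustrative:

import os
from dulwich.index import blob_from_path_and_stat, index_entry_from_stat
from dulwich.repo import Repo

def stage_one_file(repo_path, rel_path):
    # rel_path is a bytes path relative to the repository root.
    r = Repo(repo_path)
    index = r.open_index()
    full_path = os.path.join(os.fsencode(repo_path), rel_path)
    st = os.lstat(full_path)
    blob = blob_from_path_and_stat(full_path, st)
    r.object_store.add_object(blob)
    index[rel_path] = index_entry_from_stat(st, blob.id, 0)
    index.write()
    return index.commit(r.object_store)  # root tree SHA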
diff --git a/dulwich/object_store.py b/dulwich/object_store.py index bc752215..b5cd0e27 100644 --- a/dulwich/object_store.py +++ b/dulwich/object_store.py @@ -1,1464 +1,1473 @@ # object_store.py -- Object store for git objects # Copyright (C) 2008-2013 Jelmer Vernooij # and others # # Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU # General Public License as published by the Free Software Foundation; version 2.0 # or (at your option) any later version. You can redistribute it and/or # modify it under the terms of either of these two licenses. # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # You should have received a copy of the licenses; if not, see # for a copy of the GNU General Public License # and for a copy of the Apache # License, Version 2.0. # """Git object store interfaces and implementation.""" from io import BytesIO import os import stat import sys from dulwich.diff_tree import ( tree_changes, walk_trees, ) from dulwich.errors import ( NotTreeError, ) from dulwich.file import GitFile from dulwich.objects import ( Commit, ShaFile, Tag, Tree, ZERO_SHA, hex_to_sha, sha_to_hex, hex_to_filename, S_ISGITLINK, object_class, valid_hexsha, ) from dulwich.pack import ( Pack, PackData, PackInflater, PackFileDisappeared, iter_sha1, pack_objects_to_data, write_pack_header, write_pack_index_v2, write_pack_data, write_pack_object, compute_file_sha, PackIndexer, PackStreamCopier, ) from dulwich.refs import ANNOTATED_TAG_SUFFIX INFODIR = "info" PACKDIR = "pack" class BaseObjectStore(object): """Object store interface.""" def determine_wants_all(self, refs): return [ sha for (ref, sha) in refs.items() if sha not in self and not ref.endswith(ANNOTATED_TAG_SUFFIX) and not sha == ZERO_SHA ] def iter_shas(self, shas): """Iterate over the objects for the specified shas. Args: shas: Iterable object with SHAs Returns: Object iterator """ return ObjectStoreIterator(self, shas) def contains_loose(self, sha): """Check if a particular object is present by SHA1 and is loose.""" raise NotImplementedError(self.contains_loose) def contains_packed(self, sha): """Check if a particular object is present by SHA1 and is packed.""" raise NotImplementedError(self.contains_packed) def __contains__(self, sha): """Check if a particular object is present by SHA1. This method makes no distinction between loose and packed objects. """ return self.contains_packed(sha) or self.contains_loose(sha) @property def packs(self): """Iterable of pack objects.""" raise NotImplementedError def get_raw(self, name): """Obtain the raw text for an object. Args: name: sha for the object. Returns: tuple with numeric type and object contents. """ raise NotImplementedError(self.get_raw) def __getitem__(self, sha): """Obtain an object by SHA1.""" type_num, uncomp = self.get_raw(sha) return ShaFile.from_raw_string(type_num, uncomp, sha=sha) def __iter__(self): """Iterate over the SHAs that are present in this store.""" raise NotImplementedError(self.__iter__) def add_object(self, obj): """Add a single object to this object store.""" raise NotImplementedError(self.add_object) def add_objects(self, objects, progress=None): """Add a set of objects to this object store. Args: objects: Iterable over a list of (object, path) tuples """ raise NotImplementedError(self.add_objects) def add_pack_data(self, count, pack_data, progress=None): """Add pack data to this object store.
Args: count: Number of items to add pack_data: Iterator over pack data tuples """ if count == 0: # Don't bother writing an empty pack file return f, commit, abort = self.add_pack() try: write_pack_data( f, count, pack_data, progress, compression_level=self.pack_compression_level, ) except BaseException: abort() raise else: return commit() def tree_changes( self, source, target, want_unchanged=False, include_trees=False, change_type_same=False, rename_detector=None, ): """Find the differences between the contents of two trees. Args: source: SHA1 of the source tree target: SHA1 of the target tree want_unchanged: Whether unchanged files should be reported include_trees: Whether to include trees change_type_same: Whether to report files changing type in the same entry. Returns: Iterator over tuples with (oldpath, newpath), (oldmode, newmode), (oldsha, newsha) """ for change in tree_changes( self, source, target, want_unchanged=want_unchanged, include_trees=include_trees, change_type_same=change_type_same, rename_detector=rename_detector, ): yield ( (change.old.path, change.new.path), (change.old.mode, change.new.mode), (change.old.sha, change.new.sha), ) def iter_tree_contents(self, tree_id, include_trees=False): """Iterate the contents of a tree and all subtrees. Iteration is depth-first pre-order, as in e.g. os.walk. Args: tree_id: SHA1 of the tree. include_trees: If True, include tree objects in the iteration. Returns: Iterator over TreeEntry namedtuples for all the objects in a tree. """ for entry, _ in walk_trees(self, tree_id, None): if ( entry.mode is not None and not stat.S_ISDIR(entry.mode) ) or include_trees: yield entry def find_missing_objects( self, haves, wants, shallow=None, progress=None, get_tagged=None, get_parents=lambda commit: commit.parents, depth=None, ): """Find the missing objects required for a set of revisions. Args: haves: Iterable over SHAs already in common. wants: Iterable over SHAs of objects to fetch. shallow: Set of shallow commit SHA1s to skip progress: Simple progress function that will be called with updated progress strings. get_tagged: Function that returns a dict of pointed-to sha -> tag sha for including tags. get_parents: Optional function for getting the parents of a commit. Returns: Iterator over (sha, path) pairs. """ finder = MissingObjectFinder( - self, haves, wants, shallow, progress, get_tagged, get_parents=get_parents + self, + haves, + wants, + shallow, + progress, + get_tagged, + get_parents=get_parents, ) return iter(finder.next, None) def find_common_revisions(self, graphwalker): """Find which revisions this store has in common using graphwalker. Args: graphwalker: A graphwalker object. Returns: List of SHAs that are in common """ haves = [] sha = next(graphwalker) while sha: if sha in self: haves.append(sha) graphwalker.ack(sha) sha = next(graphwalker) return haves def generate_pack_contents(self, have, want, shallow=None, progress=None): """Iterate over the contents of a pack file. Args: have: List of SHA1s of objects that should not be sent want: List of SHA1s of objects that should be sent shallow: Set of shallow commit SHA1s to skip progress: Optional progress reporting method """ missing = self.find_missing_objects(have, want, shallow, progress) return self.iter_shas(missing)
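A small sketch (not part of the patch) exercising tree_changes and iter_tree_contents against an in-memory store:

from dulwich.object_store import MemoryObjectStore
from dulwich.objects import Blob, Tree

store = MemoryObjectStore()
old_blob, new_blob = Blob.from_string(b"one\n"), Blob.from_string(b"two\n")
old_tree, new_tree = Tree(), Tree()
old_tree.add(b"a.txt", 0o100644, old_blob.id)
new_tree.add(b"a.txt", 0o100644, new_blob.id)
for obj in (old_blob, new_blob, old_tree, new_tree):
    store.add_object(obj)

# Yields ((oldpath, newpath), (oldmode, newmode), (oldsha, newsha)) tuples
for paths, modes, shas in store.tree_changes(old_tree.id, new_tree.id):
    print(paths, modes, shas)

# Depth-first pre-order iteration over every blob in the tree
for entry in store.iter_tree_contents(new_tree.id):
    print(entry.path, oct(entry.mode), entry.sha)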
def generate_pack_data( self, have, want, shallow=None, progress=None, ofs_delta=True ): """Generate pack data objects for a set of wants/haves. Args: have: List of SHA1s of objects that should not be sent want: List of SHA1s of objects that should be sent shallow: Set of shallow commit SHA1s to skip ofs_delta: Whether OFS deltas can be included progress: Optional progress reporting method """ # TODO(jelmer): More efficient implementation return pack_objects_to_data( self.generate_pack_contents(have, want, shallow, progress) ) def peel_sha(self, sha): """Peel all tags from a SHA. Args: sha: The object SHA to peel. Returns: The fully-peeled SHA1 of a tag object, after peeling all intermediate tags; if the original ref does not point to a tag, this will equal the original SHA1. """ obj = self[sha] obj_class = object_class(obj.type_name) while obj_class is Tag: obj_class, sha = obj.object obj = self[sha] return obj def _collect_ancestors( self, heads, common=set(), shallow=set(), get_parents=lambda commit: commit.parents, ): """Collect all ancestors of heads up to (excluding) those in common. Args: heads: commits to start from common: commits to end at, or empty set to walk repository completely shallow: Set of shallow commit SHA1s to skip get_parents: Optional function for getting the parents of a commit. Returns: a tuple (A, B) where A - all commits reachable from heads but not present in common, B - common (shared) elements that are directly reachable from heads """ bases = set() commits = set() queue = [] queue.extend(heads) while queue: e = queue.pop(0) if e in common: bases.add(e) elif e not in commits: commits.add(e) if e in shallow: continue cmt = self[e] queue.extend(get_parents(cmt)) return (commits, bases) def close(self): """Close any files opened by this object store.""" # Default implementation is a NO-OP class PackBasedObjectStore(BaseObjectStore): def __init__(self, pack_compression_level=-1): self._pack_cache = {} self.pack_compression_level = pack_compression_level @property def alternates(self): return [] def contains_packed(self, sha): """Check if a particular object is present by SHA1 and is packed. This does not check alternates. """ for pack in self.packs: try: if sha in pack: return True except PackFileDisappeared: pass return False def __contains__(self, sha): """Check if a particular object is present by SHA1. This method makes no distinction between loose and packed objects.
""" if self.contains_packed(sha) or self.contains_loose(sha): return True for alternate in self.alternates: if sha in alternate: return True return False def _add_cached_pack(self, base_name, pack): """Add a newly appeared pack to the cache by path.""" prev_pack = self._pack_cache.get(base_name) if prev_pack is not pack: self._pack_cache[base_name] = pack if prev_pack: prev_pack.close() def _clear_cached_packs(self): pack_cache = self._pack_cache self._pack_cache = {} while pack_cache: (name, pack) = pack_cache.popitem() pack.close() def _iter_cached_packs(self): return self._pack_cache.values() def _update_pack_cache(self): raise NotImplementedError(self._update_pack_cache) def close(self): self._clear_cached_packs() @property def packs(self): """List with pack objects.""" return list(self._iter_cached_packs()) + list(self._update_pack_cache()) def _iter_alternate_objects(self): """Iterate over the SHAs of all the objects in alternate stores.""" for alternate in self.alternates: for alternate_object in alternate: yield alternate_object def _iter_loose_objects(self): """Iterate over the SHAs of all loose objects.""" raise NotImplementedError(self._iter_loose_objects) def _get_loose_object(self, sha): raise NotImplementedError(self._get_loose_object) def _remove_loose_object(self, sha): raise NotImplementedError(self._remove_loose_object) def _remove_pack(self, name): raise NotImplementedError(self._remove_pack) def pack_loose_objects(self): """Pack loose objects. Returns: Number of objects packed """ objects = set() for sha in self._iter_loose_objects(): objects.add((self._get_loose_object(sha), None)) self.add_objects(list(objects)) for obj, path in objects: self._remove_loose_object(obj.id) return len(objects) def repack(self): """Repack the packs in this repository. Note that this implementation is fairly naive and currently keeps all objects in memory while it repacks. """ loose_objects = set() for sha in self._iter_loose_objects(): loose_objects.add(self._get_loose_object(sha)) objects = {(obj, None) for obj in loose_objects} old_packs = {p.name(): p for p in self.packs} for name, pack in old_packs.items(): objects.update((obj, None) for obj in pack.iterobjects()) # The name of the consolidated pack might match the name of a # pre-existing pack. Take care not to remove the newly created # consolidated pack. consolidated = self.add_objects(objects) old_packs.pop(consolidated.name(), None) for obj in loose_objects: self._remove_loose_object(obj.id) for name, pack in old_packs.items(): self._remove_pack(pack) self._update_pack_cache() return len(objects) def __iter__(self): """Iterate over the SHAs that are present in this store.""" self._update_pack_cache() for pack in self._iter_cached_packs(): try: for sha in pack: yield sha except PackFileDisappeared: pass for sha in self._iter_loose_objects(): yield sha for sha in self._iter_alternate_objects(): yield sha def contains_loose(self, sha): """Check if a particular object is present by SHA1 and is loose. This does not check alternates. """ return self._get_loose_object(sha) is not None def get_raw(self, name): """Obtain the raw fulltext for an object. Args: name: sha for the object. Returns: tuple with numeric type and object contents. 
""" if name == ZERO_SHA: raise KeyError(name) if len(name) == 40: sha = hex_to_sha(name) hexsha = name elif len(name) == 20: sha = name hexsha = None else: raise AssertionError("Invalid object name %r" % (name,)) for pack in self._iter_cached_packs(): try: return pack.get_raw(sha) except (KeyError, PackFileDisappeared): pass if hexsha is None: hexsha = sha_to_hex(name) ret = self._get_loose_object(hexsha) if ret is not None: return ret.type_num, ret.as_raw_string() # Maybe something else has added a pack with the object # in the mean time? for pack in self._update_pack_cache(): try: return pack.get_raw(sha) except KeyError: pass for alternate in self.alternates: try: return alternate.get_raw(hexsha) except KeyError: pass raise KeyError(hexsha) def add_objects(self, objects, progress=None): """Add a set of objects to this object store. Args: objects: Iterable over (object, path) tuples, should support __len__. Returns: Pack object of the objects written. """ return self.add_pack_data(*pack_objects_to_data(objects), progress=progress) class DiskObjectStore(PackBasedObjectStore): """Git-style object store that exists on disk.""" def __init__(self, path, loose_compression_level=-1, pack_compression_level=-1): """Open an object store. Args: path: Path of the object store. loose_compression_level: zlib compression level for loose objects pack_compression_level: zlib compression level for pack objects """ super(DiskObjectStore, self).__init__( pack_compression_level=pack_compression_level ) self.path = path self.pack_dir = os.path.join(self.path, PACKDIR) self._alternates = None self.loose_compression_level = loose_compression_level self.pack_compression_level = pack_compression_level def __repr__(self): return "<%s(%r)>" % (self.__class__.__name__, self.path) @classmethod def from_config(cls, path, config): try: default_compression_level = int( config.get((b"core",), b"compression").decode() ) except KeyError: default_compression_level = -1 try: loose_compression_level = int( config.get((b"core",), b"looseCompression").decode() ) except KeyError: loose_compression_level = default_compression_level try: pack_compression_level = int( config.get((b"core",), "packCompression").decode() ) except KeyError: pack_compression_level = default_compression_level return cls(path, loose_compression_level, pack_compression_level) @property def alternates(self): if self._alternates is not None: return self._alternates self._alternates = [] for path in self._read_alternate_paths(): self._alternates.append(DiskObjectStore(path)) return self._alternates def _read_alternate_paths(self): try: f = GitFile(os.path.join(self.path, INFODIR, "alternates"), "rb") except FileNotFoundError: return with f: for line in f.readlines(): line = line.rstrip(b"\n") if line.startswith(b"#"): continue if os.path.isabs(line): yield os.fsdecode(line) else: yield os.fsdecode(os.path.join(os.fsencode(self.path), line)) def add_alternate_path(self, path): """Add an alternate path to this object store.""" try: os.mkdir(os.path.join(self.path, INFODIR)) except FileExistsError: pass alternates_path = os.path.join(self.path, INFODIR, "alternates") with GitFile(alternates_path, "wb") as f: try: orig_f = open(alternates_path, "rb") except FileNotFoundError: pass else: with orig_f: f.write(orig_f.read()) f.write(os.fsencode(path) + b"\n") if not os.path.isabs(path): path = os.path.join(self.path, path) self.alternates.append(DiskObjectStore(path)) def _update_pack_cache(self): """Read and iterate over new pack files and cache them.""" try: 
pack_dir_contents = os.listdir(self.pack_dir) except FileNotFoundError: self.close() return [] pack_files = set() for name in pack_dir_contents: if name.startswith("pack-") and name.endswith(".pack"): # verify that idx exists first (otherwise the pack was not yet # fully written) idx_name = os.path.splitext(name)[0] + ".idx" if idx_name in pack_dir_contents: pack_name = name[: -len(".pack")] pack_files.add(pack_name) # Open newly appeared pack files new_packs = [] for f in pack_files: if f not in self._pack_cache: pack = Pack(os.path.join(self.pack_dir, f)) new_packs.append(pack) self._pack_cache[f] = pack # Remove disappeared pack files for f in set(self._pack_cache) - pack_files: self._pack_cache.pop(f).close() return new_packs def _get_shafile_path(self, sha): # Check from object dir return hex_to_filename(self.path, sha) def _iter_loose_objects(self): for base in os.listdir(self.path): if len(base) != 2: continue for rest in os.listdir(os.path.join(self.path, base)): sha = os.fsencode(base + rest) if not valid_hexsha(sha): continue yield sha def _get_loose_object(self, sha): path = self._get_shafile_path(sha) try: return ShaFile.from_path(path) except FileNotFoundError: return None def _remove_loose_object(self, sha): os.remove(self._get_shafile_path(sha)) def _remove_pack(self, pack): try: del self._pack_cache[os.path.basename(pack._basename)] except KeyError: pass pack.close() os.remove(pack.data.path) os.remove(pack.index.path) def _get_pack_basepath(self, entries): suffix = iter_sha1(entry[0] for entry in entries) # TODO: Handle self.pack_dir being bytes suffix = suffix.decode("ascii") return os.path.join(self.pack_dir, "pack-" + suffix) def _complete_thin_pack(self, f, path, copier, indexer): """Move a specific file containing a pack into the pack directory. Note: The file should be on the same file system as the packs directory. Args: f: Open file object for the pack. path: Path to the pack file. copier: A PackStreamCopier to use for writing pack data. indexer: A PackIndexer for indexing the pack. """ entries = list(indexer) # Update the header with the new number of objects. f.seek(0) write_pack_header(f, len(entries) + len(indexer.ext_refs())) # Must flush before reading (http://bugs.python.org/issue3207) f.flush() # Rescan the rest of the pack, computing the SHA with the new header. new_sha = compute_file_sha(f, end_ofs=-20) # Must reposition before writing (http://bugs.python.org/issue3207) f.seek(0, os.SEEK_CUR) # Complete the pack. for ext_sha in indexer.ext_refs(): assert len(ext_sha) == 20 type_num, data = self.get_raw(ext_sha) offset = f.tell() crc32 = write_pack_object( f, type_num, data, sha=new_sha, compression_level=self.pack_compression_level, ) entries.append((ext_sha, offset, crc32)) pack_sha = new_sha.digest() f.write(pack_sha) f.close() # Move the pack in. entries.sort() pack_base_name = self._get_pack_basepath(entries) target_pack = pack_base_name + ".pack" if sys.platform == "win32": # Windows might have the target pack file lingering. Attempt # removal, silently passing if the target does not exist. try: os.remove(target_pack) except FileNotFoundError: pass os.rename(path, target_pack) # Write the index. index_file = GitFile(pack_base_name + ".idx", "wb") try: write_pack_index_v2(index_file, entries, pack_sha) index_file.close() finally: index_file.abort() # Add the pack to the store and return it. 
final_pack = Pack(pack_base_name) final_pack.check_length_and_checksum() self._add_cached_pack(pack_base_name, final_pack) return final_pack def add_thin_pack(self, read_all, read_some): """Add a new thin pack to this object store. Thin packs are packs that contain deltas with parents that exist outside the pack. They should never be placed in the object store directly, and always indexed and completed as they are copied. Args: read_all: Read function that blocks until the number of requested bytes are read. read_some: Read function that returns at least one byte, but may not return the number of bytes requested. Returns: A Pack object pointing at the now-completed thin pack in the objects/pack directory. """ import tempfile fd, path = tempfile.mkstemp(dir=self.path, prefix="tmp_pack_") with os.fdopen(fd, "w+b") as f: indexer = PackIndexer(f, resolve_ext_ref=self.get_raw) copier = PackStreamCopier(read_all, read_some, f, delta_iter=indexer) copier.verify() return self._complete_thin_pack(f, path, copier, indexer) def move_in_pack(self, path): """Move a specific file containing a pack into the pack directory. Note: The file should be on the same file system as the packs directory. Args: path: Path to the pack file. """ with PackData(path) as p: entries = p.sorted_entries() basename = self._get_pack_basepath(entries) index_name = basename + ".idx" if not os.path.exists(index_name): with GitFile(index_name, "wb") as f: write_pack_index_v2(f, entries, p.get_stored_checksum()) for pack in self.packs: if pack._basename == basename: return pack target_pack = basename + ".pack" if sys.platform == "win32": # Windows might have the target pack file lingering. Attempt # removal, silently passing if the target does not exist. try: os.remove(target_pack) except FileNotFoundError: pass os.rename(path, target_pack) final_pack = Pack(basename) self._add_cached_pack(basename, final_pack) return final_pack def add_pack(self): """Add a new pack to this object store. Returns: Fileobject to write to, a commit function to call when the pack is finished and an abort function. """ import tempfile fd, path = tempfile.mkstemp(dir=self.pack_dir, suffix=".pack") f = os.fdopen(fd, "wb") def commit(): f.flush() os.fsync(fd) f.close() if os.path.getsize(path) > 0: return self.move_in_pack(path) else: os.remove(path) return None def abort(): f.close() os.remove(path) return f, commit, abort def add_object(self, obj): """Add a single object to this object store. 
Args: obj: Object to add """ path = self._get_shafile_path(obj.id) dir = os.path.dirname(path) try: os.mkdir(dir) except FileExistsError: pass if os.path.exists(path): return # Already there, no need to write again with GitFile(path, "wb") as f: f.write( obj.as_legacy_object(compression_level=self.loose_compression_level) ) @classmethod def init(cls, path): try: os.mkdir(path) except FileExistsError: pass os.mkdir(os.path.join(path, "info")) os.mkdir(os.path.join(path, PACKDIR)) return cls(path) class MemoryObjectStore(BaseObjectStore): """Object store that keeps all objects in memory.""" def __init__(self): super(MemoryObjectStore, self).__init__() self._data = {} self.pack_compression_level = -1 def _to_hexsha(self, sha): if len(sha) == 40: return sha elif len(sha) == 20: return sha_to_hex(sha) else: raise ValueError("Invalid sha %r" % (sha,)) def contains_loose(self, sha): """Check if a particular object is present by SHA1 and is loose.""" return self._to_hexsha(sha) in self._data def contains_packed(self, sha): """Check if a particular object is present by SHA1 and is packed.""" return False def __iter__(self): """Iterate over the SHAs that are present in this store.""" return iter(self._data.keys()) @property def packs(self): """List with pack objects.""" return [] def get_raw(self, name): """Obtain the raw text for an object. Args: name: sha for the object. Returns: tuple with numeric type and object contents. """ obj = self[self._to_hexsha(name)] return obj.type_num, obj.as_raw_string() def __getitem__(self, name): return self._data[self._to_hexsha(name)].copy() def __delitem__(self, name): """Delete an object from this store, for testing only.""" del self._data[self._to_hexsha(name)] def add_object(self, obj): """Add a single object to this object store.""" self._data[obj.id] = obj.copy() def add_objects(self, objects, progress=None): """Add a set of objects to this object store. Args: objects: Iterable over a list of (object, path) tuples """ for obj, path in objects: self.add_object(obj) def add_pack(self): """Add a new pack to this object store. Because this object store doesn't support packs, we extract and add the individual objects. Returns: Fileobject to write to, a commit function to call when the pack is finished, and an abort function. """ f = BytesIO() def commit(): p = PackData.from_file(BytesIO(f.getvalue()), f.tell()) f.close() for obj in PackInflater.for_pack_data(p, self.get_raw): self.add_object(obj) def abort(): pass return f, commit, abort def _complete_thin_pack(self, f, indexer): """Complete a thin pack by adding external references. Args: f: Open file object for the pack. indexer: A PackIndexer for indexing the pack. """ entries = list(indexer) # Update the header with the new number of objects. f.seek(0) write_pack_header(f, len(entries) + len(indexer.ext_refs())) # Rescan the rest of the pack, computing the SHA with the new header. new_sha = compute_file_sha(f, end_ofs=-20) # Complete the pack. for ext_sha in indexer.ext_refs(): assert len(ext_sha) == 20 type_num, data = self.get_raw(ext_sha) write_pack_object(f, type_num, data, sha=new_sha) pack_sha = new_sha.digest() f.write(pack_sha) def add_thin_pack(self, read_all, read_some): """Add a new thin pack to this object store. Thin packs are packs that contain deltas with parents that exist outside the pack. Because this object store doesn't support packs, we extract and add the individual objects. Args: read_all: Read function that blocks until the number of requested bytes are read.
read_some: Read function that returns at least one byte, but may not return the number of bytes requested. """ f, commit, abort = self.add_pack() try: indexer = PackIndexer(f, resolve_ext_ref=self.get_raw) copier = PackStreamCopier(read_all, read_some, f, delta_iter=indexer) copier.verify() self._complete_thin_pack(f, indexer) except BaseException: abort() raise else: commit() class ObjectIterator(object): """Interface for iterating over objects.""" def iterobjects(self): raise NotImplementedError(self.iterobjects) class ObjectStoreIterator(ObjectIterator): """ObjectIterator that works on top of an ObjectStore.""" def __init__(self, store, sha_iter): """Create a new ObjectIterator. Args: store: Object store to retrieve from sha_iter: Iterator over (sha, path) tuples """ self.store = store self.sha_iter = sha_iter self._shas = [] def __iter__(self): """Yield tuple with next object and path.""" for sha, path in self.itershas(): yield self.store[sha], path def iterobjects(self): """Iterate over just the objects.""" for o, path in self: yield o def itershas(self): """Iterate over the SHAs.""" for sha in self._shas: yield sha for sha in self.sha_iter: self._shas.append(sha) yield sha def __contains__(self, needle): """Check if an object is present. Note: This checks if the object is present in the underlying object store, not if it would be yielded by the iterator. Args: needle: SHA1 of the object to check for """ if needle == ZERO_SHA: return False return needle in self.store def __getitem__(self, key): """Find an object by SHA1. Note: This retrieves the object from the underlying object store. It will also succeed if the object would not be returned by the iterator. """ return self.store[key] def __len__(self): """Return the number of objects.""" return len(list(self.itershas())) def empty(self): import warnings warnings.warn("Use bool() instead.", DeprecationWarning) return self._empty() def _empty(self): it = self.itershas() try: next(it) except StopIteration: return True else: return False def __bool__(self): """Indicate whether this object has contents.""" return not self._empty() def tree_lookup_path(lookup_obj, root_sha, path): """Look up an object in a Git tree. Args: lookup_obj: Callback for retrieving object by SHA1 root_sha: SHA1 of the root tree path: Path to lookup Returns: A tuple of (mode, SHA) of the resulting path. """ tree = lookup_obj(root_sha) if not isinstance(tree, Tree): raise NotTreeError(root_sha) return tree.lookup_path(lookup_obj, path) def _collect_filetree_revs(obj_store, tree_sha, kset): """Collect SHA1s of files and directories for specified tree. Args: obj_store: Object store to get objects by SHA from tree_sha: tree reference to walk kset: set to fill with references to files and directories """ filetree = obj_store[tree_sha] for name, mode, sha in filetree.iteritems(): if not S_ISGITLINK(mode) and sha not in kset: kset.add(sha) if stat.S_ISDIR(mode): _collect_filetree_revs(obj_store, sha, kset) def _split_commits_and_tags(obj_store, lst, ignore_unknown=False): """Split object id list into three lists with commit, tag, and other SHAs. Commits referenced by tags are included into commits list as well. Only SHA1s known in this repository will get through, and unless ignore_unknown argument is True, KeyError is thrown for SHA1 missing in the repository Args: obj_store: Object store to get objects by SHA1 from lst: Collection of commit and tag SHAs ignore_unknown: True to skip SHA1 missing in the repository silently. 
Returns: A tuple of (commits, tags, others) SHA1s """ commits = set() tags = set() others = set() for e in lst: try: o = obj_store[e] except KeyError: if not ignore_unknown: raise else: if isinstance(o, Commit): commits.add(e) elif isinstance(o, Tag): tags.add(e) tagged = o.object[1] c, t, o = _split_commits_and_tags( obj_store, [tagged], ignore_unknown=ignore_unknown ) commits |= c tags |= t others |= o else: others.add(e) return (commits, tags, others) class MissingObjectFinder(object): """Find the objects missing from another object store. Args: object_store: Object store containing at least all objects to be sent haves: SHA1s of commits not to send (already present in target) wants: SHA1s of commits to send progress: Optional function to report progress to. get_tagged: Function that returns a dict of pointed-to sha -> tag sha for including tags. get_parents: Optional function for getting the parents of a commit. tagged: dict of pointed-to sha -> tag sha for including tags """ def __init__( self, object_store, haves, wants, shallow=None, progress=None, get_tagged=None, get_parents=lambda commit: commit.parents, ): self.object_store = object_store if shallow is None: shallow = set() self._get_parents = get_parents # process Commits and Tags differently # Note, while haves may list commits/tags not available locally, # and such SHAs would get filtered out by _split_commits_and_tags, # wants shall list only known SHAs, and otherwise # _split_commits_and_tags fails with KeyError have_commits, have_tags, have_others = _split_commits_and_tags( object_store, haves, True ) want_commits, want_tags, want_others = _split_commits_and_tags( object_store, wants, False ) # all_ancestors is a set of commits that shall not be sent # (complete repository up to 'haves') all_ancestors = object_store._collect_ancestors( have_commits, shallow=shallow, get_parents=self._get_parents )[0] # all_missing - complete set of commits between haves and wants # common - commits from all_ancestors we hit into while # traversing parent hierarchy of wants missing_commits, common_commits = object_store._collect_ancestors( - want_commits, all_ancestors, shallow=shallow, get_parents=self._get_parents + want_commits, + all_ancestors, + shallow=shallow, + get_parents=self._get_parents, ) self.sha_done = set() # Now, fill sha_done with commits and revisions of # files and directories known to be both locally # and on target. 
Thus these commits and files # won't get selected for fetch for h in common_commits: self.sha_done.add(h) cmt = object_store[h] _collect_filetree_revs(object_store, cmt.tree, self.sha_done) # record tags we have as visited, too for t in have_tags: self.sha_done.add(t) missing_tags = want_tags.difference(have_tags) missing_others = want_others.difference(have_others) # in fact, what we 'want' is commits, tags, and others # we've found missing wants = missing_commits.union(missing_tags) wants = wants.union(missing_others) self.objects_to_send = {(w, None, False) for w in wants} if progress is None: self.progress = lambda x: None else: self.progress = progress self._tagged = get_tagged() if get_tagged else {} def add_todo(self, entries): self.objects_to_send.update([e for e in entries if e[0] not in self.sha_done]) def next(self): while True: if not self.objects_to_send: return None (sha, name, leaf) = self.objects_to_send.pop() if sha not in self.sha_done: break if not leaf: o = self.object_store[sha] if isinstance(o, Commit): self.add_todo([(o.tree, "", False)]) elif isinstance(o, Tree): self.add_todo( [ (s, n, not stat.S_ISDIR(m)) for n, m, s in o.iteritems() if not S_ISGITLINK(m) ] ) elif isinstance(o, Tag): self.add_todo([(o.object[1], None, False)]) if sha in self._tagged: self.add_todo([(self._tagged[sha], None, True)]) self.sha_done.add(sha) self.progress(("counting objects: %d\r" % len(self.sha_done)).encode("ascii")) return (sha, name) __next__ = next class ObjectStoreGraphWalker(object): """Graph walker that finds what commits are missing from an object store. :ivar heads: Revisions without descendants in the local repo :ivar get_parents: Function to retrieve parents in the local repo """ def __init__(self, local_heads, get_parents, shallow=None): """Create a new instance. Args: local_heads: Heads to start search with get_parents: Function for finding the parents of a SHA1. shallow: Set of shallow commit SHA1s """ self.heads = set(local_heads) self.get_parents = get_parents self.parents = {} if shallow is None: shallow = set() self.shallow = shallow def ack(self, sha): """Ack that a revision and its ancestors are present in the source.""" if len(sha) != 40: raise ValueError("unexpected sha %r received" % sha) ancestors = set([sha]) # stop if we run out of heads to remove while self.heads: for a in ancestors: if a in self.heads: self.heads.remove(a) # collect all ancestors new_ancestors = set() for a in ancestors: ps = self.parents.get(a) if ps is not None: new_ancestors.update(ps) self.parents[a] = None # no more ancestors; stop if not new_ancestors: break ancestors = new_ancestors def next(self): """Iterate over ancestors of heads in the target.""" if self.heads: ret = self.heads.pop() ps = self.get_parents(ret) self.parents[ret] = ps self.heads.update([p for p in ps if p not in self.parents]) return ret return None __next__ = next
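An illustrative sketch (not part of the patch) driving ObjectStoreGraphWalker by hand; the three hex ids below are made-up stand-ins for commit SHAs:

from dulwich.object_store import ObjectStoreGraphWalker

c1, c2, c3 = b"c1" * 20, b"c2" * 20, b"c3" * 20
parents = {c3: [c2], c2: [c1], c1: []}
walker = ObjectStoreGraphWalker([c3], lambda sha: parents[sha])

sha = next(walker)  # newest local head first: c3
walker.ack(sha)  # the other side already has it, so its ancestors are pruned
assert next(walker) is None  # nothing left to offer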
def commit_tree_changes(object_store, tree, changes): """Commit a specified set of changes to a tree structure. This will apply a set of changes on top of an existing tree, storing new objects in object_store. changes are a list of tuples with (path, mode, object_sha). Paths can refer to both blobs and trees. Setting the mode and object sha to None deletes the path. This method works especially well if there are only a small number of changes to a big tree. For a large number of changes to a large tree, use e.g. commit_tree. Args: object_store: Object store to store new objects in and retrieve old ones from. tree: Original tree root changes: changes to apply Returns: New tree root object """ # TODO(jelmer): Save up the objects and add them using .add_objects # rather than with individual calls to .add_object. nested_changes = {} for (path, new_mode, new_sha) in changes: try: (dirname, subpath) = path.split(b"/", 1) except ValueError: if new_sha is None: del tree[path] else: tree[path] = (new_mode, new_sha) else: nested_changes.setdefault(dirname, []).append((subpath, new_mode, new_sha)) for name, subchanges in nested_changes.items(): try: orig_subtree = object_store[tree[name][1]] except KeyError: orig_subtree = Tree() subtree = commit_tree_changes(object_store, orig_subtree, subchanges) if len(subtree) == 0: del tree[name] else: tree[name] = (stat.S_IFDIR, subtree.id) object_store.add_object(tree) return tree class OverlayObjectStore(BaseObjectStore): """Object store that can overlay multiple object stores.""" def __init__(self, bases, add_store=None): self.bases = bases self.add_store = add_store def add_object(self, object): if self.add_store is None: raise NotImplementedError(self.add_object) return self.add_store.add_object(object) def add_objects(self, objects, progress=None): if self.add_store is None: raise NotImplementedError(self.add_objects) return self.add_store.add_objects(objects, progress) @property def packs(self): ret = [] for b in self.bases: ret.extend(b.packs) return ret def __iter__(self): done = set() for b in self.bases: for o_id in b: if o_id not in done: yield o_id done.add(o_id) def get_raw(self, sha_id): for b in self.bases: try: return b.get_raw(sha_id) except KeyError: pass raise KeyError(sha_id) def contains_packed(self, sha): for b in self.bases: if b.contains_packed(sha): return True return False def contains_loose(self, sha): for b in self.bases: if b.contains_loose(sha): return True return False def read_packs_file(f): """Yield the packs listed in a packs file.""" for line in f.read().splitlines(): if not line: continue (kind, name) = line.split(b" ", 1) if kind != b"P": continue yield os.fsdecode(name)
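Before moving on to dulwich/objects.py, a usage sketch for commit_tree_changes above (not part of the patch; the file name is made up):

from dulwich.object_store import MemoryObjectStore, commit_tree_changes
from dulwich.objects import Blob, Tree

store = MemoryObjectStore()
blob = Blob.from_string(b"hello\n")
store.add_object(blob)

root = Tree()
new_root = commit_tree_changes(
    store,
    root,
    [(b"docs/readme.txt", 0o100644, blob.id)],  # add one nested blob
)
# Note: commit_tree_changes mutates and returns the tree it is given; the
# new root and the intermediate b"docs" subtree are now both in the store.
print(new_root.id)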
# """Access to base git objects.""" import binascii from io import BytesIO from collections import namedtuple import os import posixpath import stat from typing import ( Optional, Dict, Union, Type, ) import warnings import zlib from hashlib import sha1 from dulwich.errors import ( ChecksumMismatch, NotBlobError, NotCommitError, NotTagError, NotTreeError, ObjectFormatException, FileFormatException, ) from dulwich.file import GitFile ZERO_SHA = b"0" * 40 # Header fields for commits _TREE_HEADER = b"tree" _PARENT_HEADER = b"parent" _AUTHOR_HEADER = b"author" _COMMITTER_HEADER = b"committer" _ENCODING_HEADER = b"encoding" _MERGETAG_HEADER = b"mergetag" _GPGSIG_HEADER = b"gpgsig" # Header fields for objects _OBJECT_HEADER = b"object" _TYPE_HEADER = b"type" _TAG_HEADER = b"tag" _TAGGER_HEADER = b"tagger" S_IFGITLINK = 0o160000 MAX_TIME = 9223372036854775807 # (2**63) - 1 - signed long int max BEGIN_PGP_SIGNATURE = b"-----BEGIN PGP SIGNATURE-----" class EmptyFileException(FileFormatException): """An unexpectedly empty file was encountered.""" def S_ISGITLINK(m): """Check if a mode indicates a submodule. Args: m: Mode to check Returns: a ``boolean`` """ return stat.S_IFMT(m) == S_IFGITLINK def _decompress(string): dcomp = zlib.decompressobj() dcomped = dcomp.decompress(string) dcomped += dcomp.flush() return dcomped def sha_to_hex(sha): """Takes a string and returns the hex of the sha within""" hexsha = binascii.hexlify(sha) assert len(hexsha) == 40, "Incorrect length of sha1 string: %d" % hexsha return hexsha def hex_to_sha(hex): """Takes a hex sha and returns a binary sha""" assert len(hex) == 40, "Incorrect length of hexsha: %s" % hex try: return binascii.unhexlify(hex) except TypeError as exc: if not isinstance(hex, bytes): raise raise ValueError(exc.args[0]) def valid_hexsha(hex): if len(hex) != 40: return False try: binascii.unhexlify(hex) except (TypeError, binascii.Error): return False else: return True def hex_to_filename(path, hex): """Takes a hex sha and returns its filename relative to the given path.""" # os.path.join accepts bytes or unicode, but all args must be of the same # type. Make sure that hex which is expected to be bytes, is the same type # as path. if getattr(path, "encode", None) is not None: hex = hex.decode("ascii") dir = hex[:2] file = hex[2:] # Check from object dir return os.path.join(path, dir, file) def filename_to_hex(filename): """Takes an object filename and returns its corresponding hex sha.""" # grab the last (up to) two path components names = filename.rsplit(os.path.sep, 2)[-2:] errmsg = "Invalid object filename: %s" % filename assert len(names) == 2, errmsg base, rest = names assert len(base) == 2 and len(rest) == 38, errmsg hex = (base + rest).encode("ascii") hex_to_sha(hex) return hex def object_header(num_type: int, length: int) -> bytes: """Return an object header for the given numeric type and text length.""" return object_class(num_type).type_name + b" " + str(length).encode("ascii") + b"\0" def serializable_property(name: str, docstring: Optional[str] = None): """A property that helps tracking whether serialization is necessary.""" def set(obj, value): setattr(obj, "_" + name, value) obj._needs_serialization = True def get(obj): return getattr(obj, "_" + name) return property(get, set, doc=docstring) def object_class(type): """Get the object class corresponding to the given type. Args: type: Either a type name string or a numeric type. Returns: The ShaFile subclass corresponding to the given type, or None if type is not a valid type name/number. 
""" return _TYPE_MAP.get(type, None) def check_hexsha(hex, error_msg): """Check if a string is a valid hex sha string. Args: hex: Hex string to check error_msg: Error message to use in exception Raises: ObjectFormatException: Raised when the string is not valid """ if not valid_hexsha(hex): raise ObjectFormatException("%s %s" % (error_msg, hex)) def check_identity(identity, error_msg): """Check if the specified identity is valid. This will raise an exception if the identity is not valid. Args: identity: Identity string error_msg: Error message to use in exception """ email_start = identity.find(b"<") email_end = identity.find(b">") if ( email_start < 0 or email_end < 0 or email_end <= email_start or identity.find(b"<", email_start + 1) >= 0 or identity.find(b">", email_end + 1) >= 0 or not identity.endswith(b">") ): raise ObjectFormatException(error_msg) def check_time(time_seconds): """Check if the specified time is not prone to overflow error. This will raise an exception if the time is not valid. Args: time_info: author/committer/tagger info """ # Prevent overflow error if time_seconds > MAX_TIME: raise ObjectFormatException("Date field should not exceed %s" % MAX_TIME) def git_line(*items): """Formats items into a space separated line.""" return b" ".join(items) + b"\n" class FixedSha(object): """SHA object that behaves like hashlib's but is given a fixed value.""" __slots__ = ("_hexsha", "_sha") def __init__(self, hexsha): if getattr(hexsha, "encode", None) is not None: hexsha = hexsha.encode("ascii") if not isinstance(hexsha, bytes): raise TypeError("Expected bytes for hexsha, got %r" % hexsha) self._hexsha = hexsha self._sha = hex_to_sha(hexsha) def digest(self): """Return the raw SHA digest.""" return self._sha def hexdigest(self): """Return the hex SHA digest.""" return self._hexsha.decode("ascii") class ShaFile(object): """A git SHA file.""" __slots__ = ("_chunked_text", "_sha", "_needs_serialization") type_name = None # type: bytes type_num = None # type: int @staticmethod def _parse_legacy_object_header(magic, f): """Parse a legacy object, creating it but not reading the file.""" bufsize = 1024 decomp = zlib.decompressobj() header = decomp.decompress(magic) start = 0 end = -1 while end < 0: extra = f.read(bufsize) header += decomp.decompress(extra) magic += extra end = header.find(b"\0", start) start = len(header) header = header[:end] type_name, size = header.split(b" ", 1) try: int(size) # sanity check except ValueError as e: raise ObjectFormatException("Object size not an integer: %s" % e) obj_class = object_class(type_name) if not obj_class: raise ObjectFormatException("Not a known type: %s" % type_name) return obj_class() def _parse_legacy_object(self, map): """Parse a legacy object, setting the raw string.""" text = _decompress(map) header_end = text.find(b"\0") if header_end < 0: raise ObjectFormatException("Invalid object header, no \\0") self.set_raw_string(text[header_end + 1 :]) def as_legacy_object_chunks(self, compression_level=-1): """Return chunks representing the object in the experimental format. 
Returns: List of strings """ compobj = zlib.compressobj(compression_level) yield compobj.compress(self._header()) for chunk in self.as_raw_chunks(): yield compobj.compress(chunk) yield compobj.flush() def as_legacy_object(self, compression_level=-1): """Return string representing the object in the experimental format.""" return b"".join( self.as_legacy_object_chunks(compression_level=compression_level) ) def as_raw_chunks(self): """Return chunks with serialization of the object. Returns: List of strings, not necessarily one per line """ if self._needs_serialization: self._sha = None self._chunked_text = self._serialize() self._needs_serialization = False return self._chunked_text def as_raw_string(self): """Return raw string with serialization of the object. Returns: String object """ return b"".join(self.as_raw_chunks()) def __bytes__(self): """Return raw string serialization of this object.""" return self.as_raw_string() def __hash__(self): """Return unique hash for this object.""" return hash(self.id) def as_pretty_string(self): """Return a string representing this object, fit for display.""" return self.as_raw_string() def set_raw_string(self, text, sha=None): """Set the contents of this object from a serialized string.""" if not isinstance(text, bytes): raise TypeError("Expected bytes for text, got %r" % text) self.set_raw_chunks([text], sha) def set_raw_chunks(self, chunks, sha=None): """Set the contents of this object from a list of chunks.""" self._chunked_text = chunks self._deserialize(chunks) if sha is None: self._sha = None else: self._sha = FixedSha(sha) self._needs_serialization = False @staticmethod def _parse_object_header(magic, f): """Parse a new style object, creating it but not reading the file.""" num_type = (ord(magic[0:1]) >> 4) & 7 obj_class = object_class(num_type) if not obj_class: raise ObjectFormatException("Not a known type %d" % num_type) return obj_class() def _parse_object(self, map): """Parse a new style object, setting self._text.""" # skip type and size; type must have already been determined, and # we trust zlib to fail if it's otherwise corrupted byte = ord(map[0:1]) used = 1 while (byte & 0x80) != 0: byte = ord(map[used : used + 1]) used += 1 raw = map[used:] self.set_raw_string(_decompress(raw)) @classmethod def _is_legacy_object(cls, magic): b0 = ord(magic[0:1]) b1 = ord(magic[1:2]) word = (b0 << 8) + b1 return (b0 & 0x8F) == 0x08 and (word % 31) == 0 @classmethod def _parse_file(cls, f): map = f.read() if not map: raise EmptyFileException("Corrupted empty file detected") if cls._is_legacy_object(map): obj = cls._parse_legacy_object_header(map, f) obj._parse_legacy_object(map) else: obj = cls._parse_object_header(map, f) obj._parse_object(map) return obj def __init__(self): """Don't call this directly""" self._sha = None self._chunked_text = [] self._needs_serialization = True def _deserialize(self, chunks): raise NotImplementedError(self._deserialize) def _serialize(self): raise NotImplementedError(self._serialize) @classmethod def from_path(cls, path): """Open a SHA file from disk.""" with GitFile(path, "rb") as f: return cls.from_file(f) @classmethod def from_file(cls, f): """Get the contents of a SHA file on disk.""" try: obj = cls._parse_file(f) obj._sha = None return obj except (IndexError, ValueError): raise ObjectFormatException("invalid object header") @staticmethod def from_raw_string(type_num, string, sha=None): """Creates an object of the indicated type from the raw string given. Args: type_num: The numeric type of the object. 
string: The raw uncompressed contents. sha: Optional known sha for the object """ obj = object_class(type_num)() obj.set_raw_string(string, sha) return obj @staticmethod def from_raw_chunks(type_num, chunks, sha=None): """Creates an object of the indicated type from the raw chunks given. Args: type_num: The numeric type of the object. chunks: An iterable of the raw uncompressed contents. sha: Optional known sha for the object """ obj = object_class(type_num)() obj.set_raw_chunks(chunks, sha) return obj @classmethod def from_string(cls, string): """Create a ShaFile from a string.""" obj = cls() obj.set_raw_string(string) return obj def _check_has_member(self, member, error_msg): """Check that the object has a given member variable. Args: member: the member variable to check for error_msg: the message for an error if the member is missing Raises: ObjectFormatException: with the given error_msg if member is missing or is None """ if getattr(self, member, None) is None: raise ObjectFormatException(error_msg) def check(self): """Check this object for internal consistency. Raises: ObjectFormatException: if the object is malformed in some way ChecksumMismatch: if the object was created with a SHA that does not match its contents """ # TODO: if we find that error-checking during object parsing is a # performance bottleneck, those checks should be moved to the class's # check() method during optimization so we can still check the object # when necessary. old_sha = self.id try: self._deserialize(self.as_raw_chunks()) self._sha = None new_sha = self.id except Exception as e: raise ObjectFormatException(e) if old_sha != new_sha: raise ChecksumMismatch(new_sha, old_sha) def _header(self): return object_header(self.type, self.raw_length()) def raw_length(self): """Returns the length of the raw string of this object.""" ret = 0 for chunk in self.as_raw_chunks(): ret += len(chunk) return ret def sha(self): """The SHA1 object that is the name of this object.""" if self._sha is None or self._needs_serialization: # this is a local because as_raw_chunks() overwrites self._sha new_sha = sha1() new_sha.update(self._header()) for chunk in self.as_raw_chunks(): new_sha.update(chunk) self._sha = new_sha return self._sha def copy(self): """Create a new copy of this SHA1 object from its raw string""" obj_class = object_class(self.get_type()) return obj_class.from_raw_string(self.get_type(), self.as_raw_string(), self.id) @property def id(self): """The hex SHA of this object.""" return self.sha().hexdigest().encode("ascii") def get_type(self): """Return the type number for this object class.""" return self.type_num def set_type(self, type): """Set the type number for this object class.""" self.type_num = type # DEPRECATED: use type_num or type_name as needed. 
type = property(get_type, set_type) def __repr__(self): return "<%s %s>" % (self.__class__.__name__, self.id) def __ne__(self, other): """Check whether this object does not match the other.""" return not isinstance(other, ShaFile) or self.id != other.id def __eq__(self, other): """Return True if the SHAs of the two objects match.""" return isinstance(other, ShaFile) and self.id == other.id def __lt__(self, other): """Return whether SHA of this object is less than the other.""" if not isinstance(other, ShaFile): raise TypeError return self.id < other.id def __le__(self, other): """Check whether SHA of this object is less than or equal to the other.""" if not isinstance(other, ShaFile): raise TypeError return self.id <= other.id def __cmp__(self, other): """Compare the SHA of this object with that of the other object.""" if not isinstance(other, ShaFile): raise TypeError return cmp(self.id, other.id) # noqa: F821 class Blob(ShaFile): """A Git Blob object.""" __slots__ = () type_name = b"blob" type_num = 3 def __init__(self): super(Blob, self).__init__() self._chunked_text = [] self._needs_serialization = False def _get_data(self): return self.as_raw_string() def _set_data(self, data): self.set_raw_string(data) data = property( _get_data, _set_data, doc="The text contained within the blob object." ) def _get_chunked(self): return self._chunked_text def _set_chunked(self, chunks): self._chunked_text = chunks def _serialize(self): return self._chunked_text def _deserialize(self, chunks): self._chunked_text = chunks chunked = property( _get_chunked, _set_chunked, doc="The text in the blob object, as chunks (not necessarily lines)", ) @classmethod def from_path(cls, path): blob = ShaFile.from_path(path) if not isinstance(blob, cls): raise NotBlobError(path) return blob def check(self): """Check this object for internal consistency. Raises: ObjectFormatException: if the object is malformed in some way """ super(Blob, self).check() def splitlines(self): """Return list of lines in this blob. This preserves the original line endings. """ chunks = self.chunked if not chunks: return [] if len(chunks) == 1: return chunks[0].splitlines(True) remaining = None ret = [] for chunk in chunks: lines = chunk.splitlines(True) if len(lines) > 1: ret.append((remaining or b"") + lines[0]) ret.extend(lines[1:-1]) remaining = lines[-1] elif len(lines) == 1: if remaining is None: remaining = lines.pop() else: remaining += lines.pop() if remaining is not None: ret.append(remaining) return ret def _parse_message(chunks): """Parse a message with a list of fields and a body. Args: chunks: the raw chunks of the tag or commit object. Returns: iterator of tuples of (field, value), one per header line, in the order read from the text, possibly including duplicates. Includes a field named None for the freeform tag/commit text. """ f = BytesIO(b"".join(chunks)) k = None v = "" eof = False def _strip_last_newline(value): """Strip the last newline from value""" if value and value.endswith(b"\n"): return value[:-1] return value # Parse the headers # # Headers can contain newlines. The next line is indented with a space. # We store the latest key as 'k', and the accumulated value as 'v'. for line in f: if line.startswith(b" "): # Indented continuation of the previous line v += line[1:] else: if k is not None: # We parsed a new header, return its value yield (k, _strip_last_newline(v)) if line == b"\n": # Empty line indicates end of headers break (k, v) = line.split(b" ", 1) else: # We reached end of file before the headers ended. 
We still need to # return the previous header, then we need to return a None field for # the text. eof = True if k is not None: yield (k, _strip_last_newline(v)) yield (None, None) if not eof: # We didn't reach the end of file while parsing headers. We can return # the rest of the file as a message. yield (None, f.read()) f.close() class Tag(ShaFile): """A Git Tag object.""" type_name = b"tag" type_num = 4 __slots__ = ( "_tag_timezone_neg_utc", "_name", "_object_sha", "_object_class", "_tag_time", "_tag_timezone", "_tagger", "_message", "_signature", ) def __init__(self): super(Tag, self).__init__() self._tagger = None self._tag_time = None self._tag_timezone = None self._tag_timezone_neg_utc = False self._signature = None @classmethod def from_path(cls, filename): tag = ShaFile.from_path(filename) if not isinstance(tag, cls): raise NotTagError(filename) return tag def check(self): """Check this object for internal consistency. Raises: ObjectFormatException: if the object is malformed in some way """ super(Tag, self).check() self._check_has_member("_object_sha", "missing object sha") self._check_has_member("_object_class", "missing object type") self._check_has_member("_name", "missing tag name") if not self._name: raise ObjectFormatException("empty tag name") check_hexsha(self._object_sha, "invalid object sha") if getattr(self, "_tagger", None): check_identity(self._tagger, "invalid tagger") self._check_has_member("_tag_time", "missing tag time") check_time(self._tag_time) last = None for field, _ in _parse_message(self._chunked_text): if field == _OBJECT_HEADER and last is not None: raise ObjectFormatException("unexpected object") elif field == _TYPE_HEADER and last != _OBJECT_HEADER: raise ObjectFormatException("unexpected type") elif field == _TAG_HEADER and last != _TYPE_HEADER: raise ObjectFormatException("unexpected tag name") elif field == _TAGGER_HEADER and last != _TAG_HEADER: raise ObjectFormatException("unexpected tagger") last = field def _serialize(self): chunks = [] chunks.append(git_line(_OBJECT_HEADER, self._object_sha)) chunks.append(git_line(_TYPE_HEADER, self._object_class.type_name)) chunks.append(git_line(_TAG_HEADER, self._name)) if self._tagger: if self._tag_time is None: chunks.append(git_line(_TAGGER_HEADER, self._tagger)) else: chunks.append( git_line( _TAGGER_HEADER, self._tagger, str(self._tag_time).encode("ascii"), format_timezone(self._tag_timezone, self._tag_timezone_neg_utc), ) ) if self._message is not None: chunks.append(b"\n") # To close headers chunks.append(self._message) if self._signature is not None: chunks.append(self._signature) return chunks def _deserialize(self, chunks): """Grab the metadata attached to the tag""" self._tagger = None self._tag_time = None self._tag_timezone = None self._tag_timezone_neg_utc = False for field, value in _parse_message(chunks): if field == _OBJECT_HEADER: self._object_sha = value elif field == _TYPE_HEADER: obj_class = object_class(value) if not obj_class: raise ObjectFormatException("Not a known type: %s" % value) self._object_class = obj_class elif field == _TAG_HEADER: self._name = value elif field == _TAGGER_HEADER: ( self._tagger, self._tag_time, (self._tag_timezone, self._tag_timezone_neg_utc), ) = parse_time_entry(value) elif field is None: if value is None: self._message = None self._signature = None else: try: sig_idx = value.index(BEGIN_PGP_SIGNATURE) except ValueError: self._message = value self._signature = None else: self._message = value[:sig_idx] self._signature = value[sig_idx:] else: raise 
ObjectFormatException("Unknown field %s" % field) def _get_object(self): """Get the object pointed to by this tag. Returns: tuple of (object class, sha). """ return (self._object_class, self._object_sha) def _set_object(self, value): (self._object_class, self._object_sha) = value self._needs_serialization = True object = property(_get_object, _set_object) name = serializable_property("name", "The name of this tag") tagger = serializable_property( "tagger", "Returns the name of the person who created this tag" ) tag_time = serializable_property( "tag_time", "The creation timestamp of the tag. As the number of seconds " "since the epoch", ) tag_timezone = serializable_property( "tag_timezone", "The timezone that tag_time is in." ) message = serializable_property("message", "the message attached to this tag") signature = serializable_property("signature", "Optional detached GPG signature") class TreeEntry(namedtuple("TreeEntry", ["path", "mode", "sha"])): """Named tuple encapsulating a single tree entry.""" def in_path(self, path): """Return a copy of this entry with the given path prepended.""" if not isinstance(self.path, bytes): raise TypeError("Expected bytes for path, got %r" % path) return TreeEntry(posixpath.join(path, self.path), self.mode, self.sha) def parse_tree(text, strict=False): """Parse a tree text. Args: text: Serialized text to parse Returns: iterator of tuples of (name, mode, sha) Raises: ObjectFormatException: if the object was malformed in some way """ count = 0 length = len(text) while count < length: mode_end = text.index(b" ", count) mode_text = text[count:mode_end] if strict and mode_text.startswith(b"0"): raise ObjectFormatException("Invalid mode '%s'" % mode_text) try: mode = int(mode_text, 8) except ValueError: raise ObjectFormatException("Invalid mode '%s'" % mode_text) name_end = text.index(b"\0", mode_end) name = text[mode_end + 1 : name_end] count = name_end + 21 sha = text[name_end + 1 : count] if len(sha) != 20: raise ObjectFormatException("Sha has invalid length") hexsha = sha_to_hex(sha) yield (name, mode, hexsha) def serialize_tree(items): """Serialize the items in a tree to a text. Args: items: Sorted iterable over (name, mode, sha) tuples Returns: Serialized tree text as chunks """ for name, mode, hexsha in items: yield ( ("%04o" % mode).encode("ascii") + b" " + name + b"\0" + hex_to_sha(hexsha) ) def sorted_tree_items(entries, name_order): """Iterate over a tree entries dictionary. Args: name_order: If True, iterate entries in order of their name. If False, iterate entries in tree order, that is, treat subtree entries as having '/' appended. entries: Dictionary mapping names to (mode, sha) tuples Returns: Iterator over (name, mode, hexsha) """ key_func = name_order and key_entry_name_order or key_entry for name, entry in sorted(entries.items(), key=key_func): mode, hexsha = entry # Stricter type checks than normal to mirror checks in the C version. mode = int(mode) if not isinstance(hexsha, bytes): raise TypeError("Expected bytes for SHA, got %r" % hexsha) yield TreeEntry(name, mode, hexsha) def key_entry(entry): """Sort key for tree entry. Args: entry: (name, value) tuplee """ (name, value) = entry if stat.S_ISDIR(value[0]): name += b"/" return name def key_entry_name_order(entry): """Sort key for tree entry in name order.""" return entry[0] def pretty_format_tree_entry(name, mode, hexsha, encoding="utf-8"): """Pretty format tree entry. 
Args: name: Name of the directory entry mode: Mode of entry hexsha: Hexsha of the referenced object Returns: string describing the tree entry """ if mode & stat.S_IFDIR: kind = "tree" else: kind = "blob" return "%04o %s %s\t%s\n" % ( mode, kind, hexsha.decode("ascii"), name.decode(encoding, "replace"), ) class Tree(ShaFile): """A Git tree object""" type_name = b"tree" type_num = 2 __slots__ = "_entries" def __init__(self): super(Tree, self).__init__() self._entries = {} @classmethod def from_path(cls, filename): tree = ShaFile.from_path(filename) if not isinstance(tree, cls): raise NotTreeError(filename) return tree def __contains__(self, name): return name in self._entries def __getitem__(self, name): return self._entries[name] def __setitem__(self, name, value): """Set a tree entry by name. Args: name: The name of the entry, as a string. value: A tuple of (mode, hexsha), where mode is the mode of the entry as an integral type and hexsha is the hex SHA of the entry as a string. """ mode, hexsha = value self._entries[name] = (mode, hexsha) self._needs_serialization = True def __delitem__(self, name): del self._entries[name] self._needs_serialization = True def __len__(self): return len(self._entries) def __iter__(self): return iter(self._entries) def add(self, name, mode, hexsha): """Add an entry to the tree. Args: mode: The mode of the entry as an integral type. Not all possible modes are supported by git; see check() for details. name: The name of the entry, as a string. hexsha: The hex SHA of the entry as a string. """ if isinstance(name, int) and isinstance(mode, bytes): (name, mode) = (mode, name) warnings.warn( "Please use Tree.add(name, mode, hexsha)", category=DeprecationWarning, stacklevel=2, ) self._entries[name] = mode, hexsha self._needs_serialization = True def iteritems(self, name_order=False): """Iterate over entries. Args: name_order: If True, iterate in name order instead of tree order. Returns: Iterator over (name, mode, sha) tuples """ return sorted_tree_items(self._entries, name_order) def items(self): """Return the sorted entries in this tree. Returns: List with (name, mode, sha) tuples """ return list(self.iteritems()) def _deserialize(self, chunks): """Grab the entries in the tree""" try: parsed_entries = parse_tree(b"".join(chunks)) except ValueError as e: raise ObjectFormatException(e) # TODO: list comprehension is for efficiency in the common (small) # case; if memory efficiency in the large case is a concern, use a # genexp. self._entries = dict([(n, (m, s)) for n, m, s in parsed_entries]) def check(self): """Check this object for internal consistency. 
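Verifies that entry names are valid (no "/", and none of b"", b".", b".." or b".git"), that modes are among the file modes git allows, that SHAs are well-formed, and that entries are sorted in tree order without duplicates.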
Raises: ObjectFormatException: if the object is malformed in some way """ super(Tree, self).check() last = None allowed_modes = ( stat.S_IFREG | 0o755, stat.S_IFREG | 0o644, stat.S_IFLNK, stat.S_IFDIR, S_IFGITLINK, # TODO: optionally exclude as in git fsck --strict stat.S_IFREG | 0o664, ) for name, mode, sha in parse_tree(b"".join(self._chunked_text), True): check_hexsha(sha, "invalid sha %s" % sha) if b"/" in name or name in (b"", b".", b"..", b".git"): raise ObjectFormatException( "invalid name %s" % name.decode("utf-8", "replace") ) if mode not in allowed_modes: raise ObjectFormatException("invalid mode %06o" % mode) entry = (name, (mode, sha)) if last: if key_entry(last) > key_entry(entry): raise ObjectFormatException("entries not sorted") if name == last[0]: raise ObjectFormatException("duplicate entry %s" % name) last = entry def _serialize(self): return list(serialize_tree(self.iteritems())) def as_pretty_string(self): text = [] for name, mode, hexsha in self.iteritems(): text.append(pretty_format_tree_entry(name, mode, hexsha)) return "".join(text) def lookup_path(self, lookup_obj, path): """Look up an object in a Git tree. Args: lookup_obj: Callback for retrieving object by SHA1 path: Path to lookup Returns: A tuple of (mode, SHA) of the resulting path. """ parts = path.split(b"/") sha = self.id mode = None for p in parts: if not p: continue obj = lookup_obj(sha) if not isinstance(obj, Tree): raise NotTreeError(sha) mode, sha = obj[p] return mode, sha def parse_timezone(text): """Parse a timezone text fragment (e.g. '+0100'). Args: text: Text to parse. Returns: Tuple with timezone as seconds difference to UTC and a boolean indicating whether this was a UTC timezone prefixed with a negative sign (-0000). """ # cgit parses the first character as the sign, and the rest # as an integer (using strtol), which could also be negative. # We do the same for compatibility. See #697828. if text[0] not in b"+-": raise ValueError("Timezone must start with + or - (%(text)s)" % vars()) sign = text[:1] offset = int(text[1:]) if sign == b"-": offset = -offset unnecessary_negative_timezone = offset >= 0 and sign == b"-" signum = (offset < 0) and -1 or 1 offset = abs(offset) hours = int(offset / 100) minutes = offset % 100 - return (signum * (hours * 3600 + minutes * 60), unnecessary_negative_timezone) + return ( + signum * (hours * 3600 + minutes * 60), + unnecessary_negative_timezone, + ) def format_timezone(offset, unnecessary_negative_timezone=False): """Format a timezone for Git serialization. Args: offset: Timezone offset as seconds difference to UTC unnecessary_negative_timezone: Whether to use a minus sign for UTC or positive timezones (-0000 and -0700 rather than +0000 / +0700).
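For example (illustrative):

>>> format_timezone(3600)
b'+0100'
>>> format_timezone(0, unnecessary_negative_timezone=True)
b'-0000'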
""" if offset % 60 != 0: raise ValueError("Unable to handle non-minute offset.") if offset < 0 or unnecessary_negative_timezone: sign = "-" offset = -offset else: sign = "+" return ("%c%02d%02d" % (sign, offset / 3600, (offset / 60) % 60)).encode("ascii") def parse_time_entry(value): """Parse time entry behavior Args: value: Bytes representing a git commit/tag line Raises: ObjectFormatException in case of parsing error (malformed field date) Returns: Tuple of (author, time, (timezone, timezone_neg_utc)) """ try: sep = value.rindex(b"> ") except ValueError: return (value, None, (None, False)) try: person = value[0 : sep + 1] rest = value[sep + 2 :] timetext, timezonetext = rest.rsplit(b" ", 1) time = int(timetext) timezone, timezone_neg_utc = parse_timezone(timezonetext) except ValueError as e: raise ObjectFormatException(e) return person, time, (timezone, timezone_neg_utc) def parse_commit(chunks): """Parse a commit object from chunks. Args: chunks: Chunks to parse Returns: Tuple of (tree, parents, author_info, commit_info, encoding, mergetag, gpgsig, message, extra) """ parents = [] extra = [] tree = None author_info = (None, None, (None, None)) commit_info = (None, None, (None, None)) encoding = None mergetag = [] message = None gpgsig = None for field, value in _parse_message(chunks): # TODO(jelmer): Enforce ordering if field == _TREE_HEADER: tree = value elif field == _PARENT_HEADER: parents.append(value) elif field == _AUTHOR_HEADER: author_info = parse_time_entry(value) elif field == _COMMITTER_HEADER: commit_info = parse_time_entry(value) elif field == _ENCODING_HEADER: encoding = value elif field == _MERGETAG_HEADER: mergetag.append(Tag.from_string(value + b"\n")) elif field == _GPGSIG_HEADER: gpgsig = value elif field is None: message = value else: extra.append((field, value)) return ( tree, parents, author_info, commit_info, encoding, mergetag, gpgsig, message, extra, ) class Commit(ShaFile): """A git commit object""" type_name = b"commit" type_num = 1 __slots__ = ( "_parents", "_encoding", "_extra", "_author_timezone_neg_utc", "_commit_timezone_neg_utc", "_commit_time", "_author_time", "_author_timezone", "_commit_timezone", "_author", "_committer", "_tree", "_message", "_mergetag", "_gpgsig", ) def __init__(self): super(Commit, self).__init__() self._parents = [] self._encoding = None self._mergetag = [] self._gpgsig = None self._extra = [] self._author_timezone_neg_utc = False self._commit_timezone_neg_utc = False @classmethod def from_path(cls, path): commit = ShaFile.from_path(path) if not isinstance(commit, cls): raise NotCommitError(path) return commit def _deserialize(self, chunks): ( self._tree, self._parents, author_info, commit_info, self._encoding, self._mergetag, self._gpgsig, self._message, self._extra, ) = parse_commit(chunks) ( self._author, self._author_time, (self._author_timezone, self._author_timezone_neg_utc), ) = author_info ( self._committer, self._commit_time, (self._commit_timezone, self._commit_timezone_neg_utc), ) = commit_info def check(self): """Check this object for internal consistency. 
Raises: ObjectFormatException: if the object is malformed in some way """ super(Commit, self).check() self._check_has_member("_tree", "missing tree") self._check_has_member("_author", "missing author") self._check_has_member("_committer", "missing committer") self._check_has_member("_author_time", "missing author time") self._check_has_member("_commit_time", "missing commit time") for parent in self._parents: check_hexsha(parent, "invalid parent sha") check_hexsha(self._tree, "invalid tree sha") check_identity(self._author, "invalid author") check_identity(self._committer, "invalid committer") check_time(self._author_time) check_time(self._commit_time) last = None for field, _ in _parse_message(self._chunked_text): if field == _TREE_HEADER and last is not None: raise ObjectFormatException("unexpected tree") - elif field == _PARENT_HEADER and last not in (_PARENT_HEADER, _TREE_HEADER): + elif field == _PARENT_HEADER and last not in ( + _PARENT_HEADER, + _TREE_HEADER, + ): raise ObjectFormatException("unexpected parent") - elif field == _AUTHOR_HEADER and last not in (_TREE_HEADER, _PARENT_HEADER): + elif field == _AUTHOR_HEADER and last not in ( + _TREE_HEADER, + _PARENT_HEADER, + ): raise ObjectFormatException("unexpected author") elif field == _COMMITTER_HEADER and last != _AUTHOR_HEADER: raise ObjectFormatException("unexpected committer") elif field == _ENCODING_HEADER and last != _COMMITTER_HEADER: raise ObjectFormatException("unexpected encoding") last = field # TODO: optionally check for duplicate parents def _serialize(self): chunks = [] tree_bytes = self._tree.id if isinstance(self._tree, Tree) else self._tree chunks.append(git_line(_TREE_HEADER, tree_bytes)) for p in self._parents: chunks.append(git_line(_PARENT_HEADER, p)) chunks.append( git_line( _AUTHOR_HEADER, self._author, str(self._author_time).encode("ascii"), format_timezone(self._author_timezone, self._author_timezone_neg_utc), ) ) chunks.append( git_line( _COMMITTER_HEADER, self._committer, str(self._commit_time).encode("ascii"), format_timezone(self._commit_timezone, self._commit_timezone_neg_utc), ) ) if self.encoding: chunks.append(git_line(_ENCODING_HEADER, self.encoding)) for mergetag in self.mergetag: mergetag_chunks = mergetag.as_raw_string().split(b"\n") chunks.append(git_line(_MERGETAG_HEADER, mergetag_chunks[0])) # Embedded extra header needs leading space for chunk in mergetag_chunks[1:]: chunks.append(b" " + chunk + b"\n") # No trailing empty line if chunks[-1].endswith(b" \n"): chunks[-1] = chunks[-1][:-2] for k, v in self.extra: if b"\n" in k or b"\n" in v: raise AssertionError("newline in extra data: %r -> %r" % (k, v)) chunks.append(git_line(k, v)) if self.gpgsig: sig_chunks = self.gpgsig.split(b"\n") chunks.append(git_line(_GPGSIG_HEADER, sig_chunks[0])) for chunk in sig_chunks[1:]: chunks.append(git_line(b"", chunk)) chunks.append(b"\n") # There must be a new line after the headers chunks.append(self._message) return chunks tree = serializable_property("tree", "Tree that is the state of this commit") def _get_parents(self): """Return a list of parents of this commit.""" return self._parents def _set_parents(self, value): """Set a list of parents of this commit.""" self._needs_serialization = True self._parents = value parents = property( - _get_parents, _set_parents, doc="Parents of this commit, by their SHA1." 
+ _get_parents, + _set_parents, + doc="Parents of this commit, by their SHA1.", ) def _get_extra(self): """Return extra settings of this commit.""" return self._extra extra = property( _get_extra, doc="Extra header fields not understood (presumably added in a " "newer version of git). Kept verbatim so the object can " "be correctly reserialized. For private commit metadata, use " "pseudo-headers in Commit.message, rather than this field.", ) author = serializable_property("author", "The name of the author of the commit") committer = serializable_property( "committer", "The name of the committer of the commit" ) message = serializable_property("message", "The commit message") commit_time = serializable_property( "commit_time", "The timestamp of the commit. As the number of seconds since the " "epoch.", ) commit_timezone = serializable_property( "commit_timezone", "The zone the commit time is in" ) author_time = serializable_property( "author_time", "The timestamp the commit was written. As the number of " "seconds since the epoch.", ) author_timezone = serializable_property( "author_timezone", "Returns the zone the author time is in." ) encoding = serializable_property("encoding", "Encoding of the commit message.") mergetag = serializable_property("mergetag", "Associated signed tag.") gpgsig = serializable_property("gpgsig", "GPG Signature.") OBJECT_CLASSES = ( Commit, Tree, Blob, Tag, ) _TYPE_MAP = {} # type: Dict[Union[bytes, int], Type[ShaFile]] for cls in OBJECT_CLASSES: _TYPE_MAP[cls.type_name] = cls _TYPE_MAP[cls.type_num] = cls # Hold on to the pure-python implementations for testing _parse_tree_py = parse_tree _sorted_tree_items_py = sorted_tree_items try: # Try to import C versions from dulwich._objects import parse_tree, sorted_tree_items # type: ignore except ImportError: pass diff --git a/dulwich/pack.py b/dulwich/pack.py index 5de592b9..92e3f358 100644 --- a/dulwich/pack.py +++ b/dulwich/pack.py @@ -1,2134 +1,2154 @@ # pack.py -- For dealing with packed git objects. # Copyright (C) 2007 James Westby # Copyright (C) 2008-2013 Jelmer Vernooij # # Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU # General Public License as public by the Free Software Foundation; version 2.0 # or (at your option) any later version. You can redistribute it and/or # modify it under the terms of either of these two licenses. # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # You should have received a copy of the licenses; if not, see # for a copy of the GNU General Public License # and for a copy of the Apache # License, Version 2.0. # """Classes for dealing with packed git objects. A pack is a compact representation of a bunch of objects, stored using deltas where possible. They have two parts, the pack file, which stores the data, and an index that tells you where the data is. To find an object you look in all of the index files 'til you find a match for the object name. You then use the pointer got from this as a pointer in to the corresponding packfile. 
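A minimal usage sketch (the pack name and hexsha are hypothetical):

    idx = load_pack_index("pack-1234.idx")
    data = PackData("pack-1234.pack")
    offset = idx.object_index(hexsha)
    type_num, chunks = data.resolve_object(offset, *data.get_object_at(offset))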
""" from collections import defaultdict import binascii from io import BytesIO, UnsupportedOperation from collections import ( deque, ) import difflib import struct from itertools import chain import os import sys from hashlib import sha1 from os import ( SEEK_CUR, SEEK_END, ) from struct import unpack_from import zlib try: import mmap except ImportError: has_mmap = False else: has_mmap = True # For some reason the above try, except fails to set has_mmap = False for plan9 if sys.platform == "Plan9": has_mmap = False from dulwich.errors import ( # noqa: E402 ApplyDeltaError, ChecksumMismatch, ) from dulwich.file import GitFile # noqa: E402 from dulwich.lru_cache import ( # noqa: E402 LRUSizeCache, ) from dulwich.objects import ( # noqa: E402 ShaFile, hex_to_sha, sha_to_hex, object_header, ) OFS_DELTA = 6 REF_DELTA = 7 DELTA_TYPES = (OFS_DELTA, REF_DELTA) DEFAULT_PACK_DELTA_WINDOW_SIZE = 10 def take_msb_bytes(read, crc32=None): """Read bytes marked with most significant bit. Args: read: Read function """ ret = [] while len(ret) == 0 or ret[-1] & 0x80: b = read(1) if crc32 is not None: crc32 = binascii.crc32(b, crc32) ret.append(ord(b[:1])) return ret, crc32 class PackFileDisappeared(Exception): def __init__(self, obj): self.obj = obj class UnpackedObject(object): """Class encapsulating an object unpacked from a pack file. These objects should only be created from within unpack_object. Most members start out as empty and are filled in at various points by read_zlib_chunks, unpack_object, DeltaChainIterator, etc. End users of this object should take care that the function they're getting this object from is guaranteed to set the members they need. """ __slots__ = [ "offset", # Offset in its pack. "_sha", # Cached binary SHA. "obj_type_num", # Type of this object. "obj_chunks", # Decompressed and delta-resolved chunks. "pack_type_num", # Type of this object in the pack (may be a delta). "delta_base", # Delta base offset or SHA. "comp_chunks", # Compressed object chunks. "decomp_chunks", # Decompressed object chunks. "decomp_len", # Decompressed length of this object. "crc32", # CRC32. ] # TODO(dborowitz): read_zlib_chunks and unpack_object could very well be # methods of this object. def __init__(self, pack_type_num, delta_base, decomp_len, crc32): self.offset = None self._sha = None self.pack_type_num = pack_type_num self.delta_base = delta_base self.comp_chunks = None self.decomp_chunks = [] self.decomp_len = decomp_len self.crc32 = crc32 if pack_type_num in DELTA_TYPES: self.obj_type_num = None self.obj_chunks = None else: self.obj_type_num = pack_type_num self.obj_chunks = self.decomp_chunks self.delta_base = delta_base def sha(self): """Return the binary SHA of this object.""" if self._sha is None: self._sha = obj_sha(self.obj_type_num, self.obj_chunks) return self._sha def sha_file(self): """Return a ShaFile from this object.""" return ShaFile.from_raw_chunks(self.obj_type_num, self.obj_chunks) # Only provided for backwards compatibility with code that expects either # chunks or a delta tuple. 
def _obj(self): """Return the decompressed chunks, or (delta base, delta chunks).""" if self.pack_type_num in DELTA_TYPES: return (self.delta_base, self.decomp_chunks) else: return self.decomp_chunks def __eq__(self, other): if not isinstance(other, UnpackedObject): return False for slot in self.__slots__: if getattr(self, slot) != getattr(other, slot): return False return True def __ne__(self, other): return not (self == other) def __repr__(self): data = ["%s=%r" % (s, getattr(self, s)) for s in self.__slots__] return "%s(%s)" % (self.__class__.__name__, ", ".join(data)) _ZLIB_BUFSIZE = 4096 def read_zlib_chunks( read_some, unpacked, include_comp=False, buffer_size=_ZLIB_BUFSIZE ): """Read zlib data from a buffer. This function requires that the buffer have additional data following the compressed data, which is guaranteed to be the case for git pack files. Args: read_some: Read function that returns at least one byte, but may return less than the requested size. unpacked: An UnpackedObject to write result data to. If its crc32 attr is not None, the CRC32 of the compressed bytes will be computed using this starting CRC32. After this function, will have the following attrs set: * comp_chunks (if include_comp is True) * decomp_chunks * decomp_len * crc32 include_comp: If True, include compressed data in the result. buffer_size: Size of the read buffer. Returns: Leftover unused data from the decompression. Raises: zlib.error: if a decompression error occurred. """ if unpacked.decomp_len <= -1: raise ValueError("non-negative zlib data stream size expected") decomp_obj = zlib.decompressobj() comp_chunks = [] decomp_chunks = unpacked.decomp_chunks decomp_len = 0 crc32 = unpacked.crc32 while True: add = read_some(buffer_size) if not add: raise zlib.error("EOF before end of zlib stream") comp_chunks.append(add) decomp = decomp_obj.decompress(add) decomp_len += len(decomp) decomp_chunks.append(decomp) unused = decomp_obj.unused_data if unused: left = len(unused) if crc32 is not None: crc32 = binascii.crc32(add[:-left], crc32) if include_comp: comp_chunks[-1] = add[:-left] break elif crc32 is not None: crc32 = binascii.crc32(add, crc32) if crc32 is not None: crc32 &= 0xFFFFFFFF if decomp_len != unpacked.decomp_len: raise zlib.error("decompressed data does not match expected size") unpacked.crc32 = crc32 if include_comp: unpacked.comp_chunks = comp_chunks return unused def iter_sha1(iter): """Return the hexdigest of the SHA1 over a set of names. Args: iter: Iterator over bytestrings Returns: 40-byte hex sha1 digest """ sha = sha1() for name in iter: sha.update(name) return sha.hexdigest().encode("ascii") def load_pack_index(path): """Load an index file by path. Args: path: Path to the index file Returns: A PackIndex loaded from the given path """ with GitFile(path, "rb") as f: return load_pack_index_file(path, f) def _load_file_contents(f, size=None): try: fd = f.fileno() except (UnsupportedOperation, AttributeError): fd = None # Attempt to use mmap if possible if fd is not None: if size is None: size = os.fstat(fd).st_size if has_mmap: try: contents = mmap.mmap(fd, size, access=mmap.ACCESS_READ) except mmap.error: # Perhaps a socket? pass else: return contents, size contents = f.read() size = len(contents) return contents, size def load_pack_index_file(path, f): """Load an index file from a file-like object.
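The index version is detected from the header: files that start with the magic b"\377tOc" carry a version number (only 2 is supported); anything else is read as a version 1 index.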
Args: path: Path for the index file f: File-like object Returns: A PackIndex loaded from the given file """ contents, size = _load_file_contents(f) if contents[:4] == b"\377tOc": version = struct.unpack(b">L", contents[4:8])[0] if version == 2: return PackIndex2(path, file=f, contents=contents, size=size) else: raise KeyError("Unknown pack index format %d" % version) else: return PackIndex1(path, file=f, contents=contents, size=size) def bisect_find_sha(start, end, sha, unpack_name): """Find a SHA in a data blob with sorted SHAs. Args: start: Start index of range to search end: End index of range to search sha: Sha to find unpack_name: Callback to retrieve SHA by index Returns: Index of the SHA, or None if it wasn't found """ assert start <= end while start <= end: i = (start + end) // 2 file_sha = unpack_name(i) if file_sha < sha: start = i + 1 elif file_sha > sha: end = i - 1 else: return i return None class PackIndex(object): """An index in to a packfile. Given a sha id of an object a pack index can tell you the location in the packfile of that object if it has it. """ def __eq__(self, other): if not isinstance(other, PackIndex): return False for (name1, _, _), (name2, _, _) in zip( self.iterentries(), other.iterentries() ): if name1 != name2: return False return True def __ne__(self, other): return not self.__eq__(other) def __len__(self): """Return the number of entries in this pack index.""" raise NotImplementedError(self.__len__) def __iter__(self): """Iterate over the SHAs in this pack.""" return map(sha_to_hex, self._itersha()) def iterentries(self): """Iterate over the entries in this pack index. Returns: iterator over tuples with object name, offset in packfile and crc32 checksum. """ raise NotImplementedError(self.iterentries) def get_pack_checksum(self): """Return the SHA1 checksum stored for the corresponding packfile. Returns: 20-byte binary digest """ raise NotImplementedError(self.get_pack_checksum) def object_index(self, sha): """Return the index in to the corresponding packfile for the object. Given the name of an object it will return the offset that object lives at within the corresponding pack file. If the pack file doesn't have the object then None will be returned. """ if len(sha) == 40: sha = hex_to_sha(sha) try: return self._object_index(sha) except ValueError: closed = getattr(self._contents, "closed", None) if closed in (None, True): raise PackFileDisappeared(self) raise def object_sha1(self, index): """Return the SHA1 corresponding to the index in the pack file.""" # PERFORMANCE/TODO(jelmer): Avoid scanning entire index for (name, offset, crc32) in self.iterentries(): if offset == index: return name else: raise KeyError(index) def _object_index(self, sha): """See object_index. Args: sha: A *binary* SHA string. (20 characters long)_ """ raise NotImplementedError(self._object_index) def objects_sha1(self): """Return the hex SHA1 over all the shas of all objects in this pack. Note: This is used for the filename of the pack. """ return iter_sha1(self._itersha()) def _itersha(self): """Yield all the SHA1's of the objects in the index, sorted.""" raise NotImplementedError(self._itersha) class MemoryPackIndex(PackIndex): """Pack index that is stored entirely in memory.""" def __init__(self, entries, pack_checksum=None): """Create a new MemoryPackIndex. 
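Entries are kept in plain dictionaries, mapping SHA to pack offset and offset back to SHA.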
Args: entries: Sequence of name, idx, crc32 (sorted) pack_checksum: Optional pack checksum """ self._by_sha = {} self._by_index = {} for name, idx, crc32 in entries: self._by_sha[name] = idx self._by_index[idx] = name self._entries = entries self._pack_checksum = pack_checksum def get_pack_checksum(self): return self._pack_checksum def __len__(self): return len(self._entries) def _object_index(self, sha): return self._by_sha[sha] def object_sha1(self, index): return self._by_index[index] def _itersha(self): return iter(self._by_sha) def iterentries(self): return iter(self._entries) class FilePackIndex(PackIndex): """Pack index that is based on a file. The file is mapped whenever required. It begins with a fan-out table of 256 4-byte groups, indexed by the first byte of the sha id: the value in a group is the end of the range of entries whose shas share that starting byte, and the previous group gives the start of the range. The values are sorted by sha id within each range, so a bisect over the range determines whether a value is present. """ def __init__(self, filename, file=None, contents=None, size=None): """Create a pack index object. Provide it with the name of the index file to consider, and it will map it whenever required. """ self._filename = filename # Take the size now, so it can be checked each time we map the file to # ensure that it hasn't changed. if file is None: self._file = GitFile(filename, "rb") else: self._file = file if contents is None: self._contents, self._size = _load_file_contents(self._file, size) else: self._contents, self._size = (contents, size) @property def path(self): return self._filename def __eq__(self, other): # Quick optimization: if ( isinstance(other, FilePackIndex) and self._fan_out_table != other._fan_out_table ): return False return super(FilePackIndex, self).__eq__(other) def close(self): self._file.close() if getattr(self._contents, "close", None) is not None: self._contents.close() def __len__(self): """Return the number of entries in this pack index.""" return self._fan_out_table[-1] def _unpack_entry(self, i): """Unpack the i-th entry in the index file. Returns: Tuple with object name (SHA), offset in pack file and CRC32 checksum (if known). """ raise NotImplementedError(self._unpack_entry) def _unpack_name(self, i): """Unpack the i-th name from the index file.""" raise NotImplementedError(self._unpack_name) def _unpack_offset(self, i): """Unpack the i-th object offset from the index file.""" raise NotImplementedError(self._unpack_offset) def _unpack_crc32_checksum(self, i): """Unpack the crc32 checksum for the ith object from the index file.""" raise NotImplementedError(self._unpack_crc32_checksum) def _itersha(self): for i in range(len(self)): yield self._unpack_name(i) def iterentries(self): """Iterate over the entries in this pack index. Returns: iterator over tuples with object name, offset in packfile and crc32 checksum. """ for i in range(len(self)): yield self._unpack_entry(i) def _read_fan_out_table(self, start_offset): ret = [] for i in range(0x100): fanout_entry = self._contents[ start_offset + i * 4 : start_offset + (i + 1) * 4 ] ret.append(struct.unpack(">L", fanout_entry)[0]) return ret def check(self): """Check that the stored checksum matches the actual checksum.""" actual = self.calculate_checksum() stored = self.get_stored_checksum() if actual != stored: raise ChecksumMismatch(stored, actual) def calculate_checksum(self): """Calculate the SHA1 checksum over this pack index.
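The checksum covers the whole index file except its final 20 bytes, which hold the stored checksum itself.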
Returns: This is a 20-byte binary digest """ return sha1(self._contents[:-20]).digest() def get_pack_checksum(self): """Return the SHA1 checksum stored for the corresponding packfile. Returns: 20-byte binary digest """ return bytes(self._contents[-40:-20]) def get_stored_checksum(self): """Return the SHA1 checksum stored for this index. Returns: 20-byte binary digest """ return bytes(self._contents[-20:]) def _object_index(self, sha): """See object_index. Args: sha: A *binary* SHA string. (20 characters long)_ """ assert len(sha) == 20 idx = ord(sha[:1]) if idx == 0: start = 0 else: start = self._fan_out_table[idx - 1] end = self._fan_out_table[idx] i = bisect_find_sha(start, end, sha, self._unpack_name) if i is None: raise KeyError(sha) return self._unpack_offset(i) class PackIndex1(FilePackIndex): """Version 1 Pack Index file.""" def __init__(self, filename, file=None, contents=None, size=None): super(PackIndex1, self).__init__(filename, file, contents, size) self.version = 1 self._fan_out_table = self._read_fan_out_table(0) def _unpack_entry(self, i): (offset, name) = unpack_from(">L20s", self._contents, (0x100 * 4) + (i * 24)) return (name, offset, None) def _unpack_name(self, i): offset = (0x100 * 4) + (i * 24) + 4 return self._contents[offset : offset + 20] def _unpack_offset(self, i): offset = (0x100 * 4) + (i * 24) return unpack_from(">L", self._contents, offset)[0] def _unpack_crc32_checksum(self, i): # Not stored in v1 index files return None class PackIndex2(FilePackIndex): """Version 2 Pack Index file.""" def __init__(self, filename, file=None, contents=None, size=None): super(PackIndex2, self).__init__(filename, file, contents, size) if self._contents[:4] != b"\377tOc": raise AssertionError("Not a v2 pack index file") (self.version,) = unpack_from(b">L", self._contents, 4) if self.version != 2: raise AssertionError("Version was %d" % self.version) self._fan_out_table = self._read_fan_out_table(8) self._name_table_offset = 8 + 0x100 * 4 self._crc32_table_offset = self._name_table_offset + 20 * len(self) self._pack_offset_table_offset = self._crc32_table_offset + 4 * len(self) self._pack_offset_largetable_offset = self._pack_offset_table_offset + 4 * len( self ) def _unpack_entry(self, i): return ( self._unpack_name(i), self._unpack_offset(i), self._unpack_crc32_checksum(i), ) def _unpack_name(self, i): offset = self._name_table_offset + i * 20 return self._contents[offset : offset + 20] def _unpack_offset(self, i): offset = self._pack_offset_table_offset + i * 4 offset = unpack_from(">L", self._contents, offset)[0] if offset & (2 ** 31): offset = self._pack_offset_largetable_offset + (offset & (2 ** 31 - 1)) * 8 offset = unpack_from(">Q", self._contents, offset)[0] return offset def _unpack_crc32_checksum(self, i): return unpack_from(">L", self._contents, self._crc32_table_offset + i * 4)[0] def read_pack_header(read): """Read the header of a pack file. Args: read: Read function Returns: Tuple of (pack version, number of objects). If no data is available to read, returns (None, None). 
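For example (illustrative):

>>> from io import BytesIO
>>> read_pack_header(BytesIO(b"PACK\x00\x00\x00\x02\x00\x00\x00\x03").read)
(2, 3)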
""" header = read(12) if not header: return None, None if header[:4] != b"PACK": raise AssertionError("Invalid pack header %r" % header) (version,) = unpack_from(b">L", header, 4) if version not in (2, 3): raise AssertionError("Version was %d" % version) (num_objects,) = unpack_from(b">L", header, 8) return (version, num_objects) def chunks_length(chunks): if isinstance(chunks, bytes): return len(chunks) else: return sum(map(len, chunks)) def unpack_object( read_all, read_some=None, compute_crc32=False, include_comp=False, zlib_bufsize=_ZLIB_BUFSIZE, ): """Unpack a Git object. Args: read_all: Read function that blocks until the number of requested bytes are read. read_some: Read function that returns at least one byte, but may not return the number of bytes requested. compute_crc32: If True, compute the CRC32 of the compressed data. If False, the returned CRC32 will be None. include_comp: If True, include compressed data in the result. zlib_bufsize: An optional buffer size for zlib operations. Returns: A tuple of (unpacked, unused), where unused is the unused data leftover from decompression, and unpacked in an UnpackedObject with the following attrs set: * obj_chunks (for non-delta types) * pack_type_num * delta_base (for delta types) * comp_chunks (if include_comp is True) * decomp_chunks * decomp_len * crc32 (if compute_crc32 is True) """ if read_some is None: read_some = read_all if compute_crc32: crc32 = 0 else: crc32 = None bytes, crc32 = take_msb_bytes(read_all, crc32=crc32) type_num = (bytes[0] >> 4) & 0x07 size = bytes[0] & 0x0F for i, byte in enumerate(bytes[1:]): size += (byte & 0x7F) << ((i * 7) + 4) raw_base = len(bytes) if type_num == OFS_DELTA: bytes, crc32 = take_msb_bytes(read_all, crc32=crc32) raw_base += len(bytes) if bytes[-1] & 0x80: raise AssertionError delta_base_offset = bytes[0] & 0x7F for byte in bytes[1:]: delta_base_offset += 1 delta_base_offset <<= 7 delta_base_offset += byte & 0x7F delta_base = delta_base_offset elif type_num == REF_DELTA: delta_base = read_all(20) if compute_crc32: crc32 = binascii.crc32(delta_base, crc32) raw_base += 20 else: delta_base = None unpacked = UnpackedObject(type_num, delta_base, size, crc32) unused = read_zlib_chunks( - read_some, unpacked, buffer_size=zlib_bufsize, include_comp=include_comp + read_some, + unpacked, + buffer_size=zlib_bufsize, + include_comp=include_comp, ) return unpacked, unused def _compute_object_size(value): """Compute the size of a unresolved object for use with LRUSizeCache.""" (num, obj) = value if num in DELTA_TYPES: return chunks_length(obj[1]) return chunks_length(obj) class PackStreamReader(object): """Class to read a pack stream. The pack is read from a ReceivableProtocol using read() or recv() as appropriate. """ def __init__(self, read_all, read_some=None, zlib_bufsize=_ZLIB_BUFSIZE): self.read_all = read_all if read_some is None: self.read_some = read_all else: self.read_some = read_some self.sha = sha1() self._offset = 0 self._rbuf = BytesIO() # trailer is a deque to avoid memory allocation on small reads self._trailer = deque() self._zlib_bufsize = zlib_bufsize def _read(self, read, size): """Read up to size bytes using the given callback. As a side effect, update the verifier's hash (excluding the last 20 bytes read). Args: read: The read callback to read from. size: The maximum number of bytes to read; the particular behavior is callback-specific. 
""" data = read(size) # maintain a trailer of the last 20 bytes we've read n = len(data) self._offset += n tn = len(self._trailer) if n >= 20: to_pop = tn to_add = 20 else: to_pop = max(n + tn - 20, 0) to_add = n self.sha.update( bytes(bytearray([self._trailer.popleft() for _ in range(to_pop)])) ) self._trailer.extend(data[-to_add:]) # hash everything but the trailer self.sha.update(data[:-to_add]) return data def _buf_len(self): buf = self._rbuf start = buf.tell() buf.seek(0, SEEK_END) end = buf.tell() buf.seek(start) return end - start @property def offset(self): return self._offset - self._buf_len() def read(self, size): """Read, blocking until size bytes are read.""" buf_len = self._buf_len() if buf_len >= size: return self._rbuf.read(size) buf_data = self._rbuf.read() self._rbuf = BytesIO() return buf_data + self._read(self.read_all, size - buf_len) def recv(self, size): """Read up to size bytes, blocking until one byte is read.""" buf_len = self._buf_len() if buf_len: data = self._rbuf.read(size) if size >= buf_len: self._rbuf = BytesIO() return data return self._read(self.read_some, size) def __len__(self): return self._num_objects def read_objects(self, compute_crc32=False): """Read the objects in this pack file. Args: compute_crc32: If True, compute the CRC32 of the compressed data. If False, the returned CRC32 will be None. Returns: Iterator over UnpackedObjects with the following members set: offset obj_type_num obj_chunks (for non-delta types) delta_base (for delta types) decomp_chunks decomp_len crc32 (if compute_crc32 is True) Raises: ChecksumMismatch: if the checksum of the pack contents does not match the checksum in the pack trailer. zlib.error: if an error occurred during zlib decompression. IOError: if an error occurred writing to the output file. """ pack_version, self._num_objects = read_pack_header(self.read) if pack_version is None: return for i in range(self._num_objects): offset = self.offset unpacked, unused = unpack_object( self.read, read_some=self.recv, compute_crc32=compute_crc32, zlib_bufsize=self._zlib_bufsize, ) unpacked.offset = offset # prepend any unused data to current read buffer buf = BytesIO() buf.write(unused) buf.write(self._rbuf.read()) buf.seek(0) self._rbuf = buf yield unpacked if self._buf_len() < 20: # If the read buffer is full, then the last read() got the whole # trailer off the wire. If not, it means there is still some of the # trailer to read. We need to read() all 20 bytes; N come from the # read buffer and (20 - N) come from the wire. self.read(20) pack_sha = bytearray(self._trailer) if pack_sha != self.sha.digest(): raise ChecksumMismatch(sha_to_hex(pack_sha), self.sha.hexdigest()) class PackStreamCopier(PackStreamReader): """Class to verify a pack stream as it is being read. The pack is read from a ReceivableProtocol using read() or recv() as appropriate and written out to the given file-like object. """ def __init__(self, read_all, read_some, outfile, delta_iter=None): """Initialize the copier. Args: read_all: Read function that blocks until the number of requested bytes are read. read_some: Read function that returns at least one byte, but may not return the number of bytes requested. outfile: File-like object to write output through. delta_iter: Optional DeltaChainIterator to record deltas as we read them. 
""" super(PackStreamCopier, self).__init__(read_all, read_some=read_some) self.outfile = outfile self._delta_iter = delta_iter def _read(self, read, size): """Read data from the read callback and write it to the file.""" data = super(PackStreamCopier, self)._read(read, size) self.outfile.write(data) return data def verify(self): """Verify a pack stream and write it to the output file. See PackStreamReader.iterobjects for a list of exceptions this may throw. """ if self._delta_iter: for unpacked in self.read_objects(): self._delta_iter.record(unpacked) else: for _ in self.read_objects(): pass def obj_sha(type, chunks): """Compute the SHA for a numeric type and object chunks.""" sha = sha1() sha.update(object_header(type, chunks_length(chunks))) if isinstance(chunks, bytes): sha.update(chunks) else: for chunk in chunks: sha.update(chunk) return sha.digest() def compute_file_sha(f, start_ofs=0, end_ofs=0, buffer_size=1 << 16): """Hash a portion of a file into a new SHA. Args: f: A file-like object to read from that supports seek(). start_ofs: The offset in the file to start reading at. end_ofs: The offset in the file to end reading at, relative to the end of the file. buffer_size: A buffer size for reading. Returns: A new SHA object updated with data read from the file. """ sha = sha1() f.seek(0, SEEK_END) length = f.tell() if (end_ofs < 0 and length + end_ofs < start_ofs) or end_ofs > length: raise AssertionError( "Attempt to read beyond file length. " "start_ofs: %d, end_ofs: %d, file length: %d" % (start_ofs, end_ofs, length) ) todo = length + end_ofs - start_ofs f.seek(start_ofs) while todo: data = f.read(min(todo, buffer_size)) sha.update(data) todo -= len(data) return sha class PackData(object): """The data contained in a packfile. Pack files can be accessed both sequentially for exploding a pack, and directly with the help of an index to retrieve a specific object. The objects within are either complete or a delta against another. The header is variable length. If the MSB of each byte is set then it indicates that the subsequent byte is still part of the header. For the first byte the next MS bits are the type, which tells you the type of object, and whether it is a delta. The LS byte is the lowest bits of the size. For each subsequent byte the LS 7 bits are the next MS bits of the size, i.e. the last byte of the header contains the MS bits of the size. For the complete objects the data is stored as zlib deflated data. The size in the header is the uncompressed object size, so to uncompress you need to just keep feeding data to zlib until you get an object back, or it errors on bad data. This is done here by just giving the complete buffer from the start of the deflated object on. This is bad, but until I get mmap sorted out it will have to do. Currently there are no integrity checks done. Also no attempt is made to try and detect the delta case, or a request for an object at the wrong position. It will all just throw a zlib or KeyError. """ def __init__(self, filename, file=None, size=None): """Create a PackData object representing the pack in the given filename. The file must exist and stay readable until the object is disposed of. It must also stay the same size. It will be mapped whenever needed. Currently there is a restriction on the size of the pack as the python mmap implementation is flawed. 
""" self._filename = filename self._size = size self._header_size = 12 if file is None: self._file = GitFile(self._filename, "rb") else: self._file = file (version, self._num_objects) = read_pack_header(self._file.read) self._offset_cache = LRUSizeCache( 1024 * 1024 * 20, compute_size=_compute_object_size ) self.pack = None @property def filename(self): return os.path.basename(self._filename) @property def path(self): return self._filename @classmethod def from_file(cls, file, size=None): return cls(str(file), file=file, size=size) @classmethod def from_path(cls, path): return cls(filename=path) def close(self): self._file.close() def __enter__(self): return self def __exit__(self, exc_type, exc_val, exc_tb): self.close() def __eq__(self, other): if isinstance(other, PackData): return self.get_stored_checksum() == other.get_stored_checksum() if isinstance(other, list): if len(self) != len(other): return False for o1, o2 in zip(self.iterobjects(), other): if o1 != o2: return False return True return False def _get_size(self): if self._size is not None: return self._size self._size = os.path.getsize(self._filename) if self._size < self._header_size: errmsg = "%s is too small for a packfile (%d < %d)" % ( self._filename, self._size, self._header_size, ) raise AssertionError(errmsg) return self._size def __len__(self): """Returns the number of objects in this pack.""" return self._num_objects def calculate_checksum(self): """Calculate the checksum for this pack. Returns: 20-byte binary SHA1 digest """ return compute_file_sha(self._file, end_ofs=-20).digest() def get_ref(self, sha): """Get the object for a ref SHA, only looking in this pack.""" # TODO: cache these results if self.pack is None: raise KeyError(sha) try: offset = self.pack.index.object_index(sha) except KeyError: offset = None if offset: type, obj = self.get_object_at(offset) elif self.pack is not None and self.pack.resolve_ext_ref: type, obj = self.pack.resolve_ext_ref(sha) else: raise KeyError(sha) return offset, type, obj def resolve_object(self, offset, type, obj, get_ref=None): """Resolve an object, possibly resolving deltas when necessary. Returns: Tuple with object type and contents. """ # Walk down the delta chain, building a stack of deltas to reach # the requested object. base_offset = offset base_type = type base_obj = obj delta_stack = [] while base_type in DELTA_TYPES: prev_offset = base_offset if get_ref is None: get_ref = self.get_ref if base_type == OFS_DELTA: (delta_offset, delta) = base_obj # TODO: clean up asserts and replace with nicer error messages base_offset = base_offset - delta_offset base_type, base_obj = self.get_object_at(base_offset) assert isinstance(base_type, int) elif base_type == REF_DELTA: (basename, delta) = base_obj assert isinstance(basename, bytes) and len(basename) == 20 base_offset, base_type, base_obj = get_ref(basename) assert isinstance(base_type, int) delta_stack.append((prev_offset, base_type, delta)) # Now grab the base object (mustn't be a delta) and apply the # deltas all the way up the stack. chunks = base_obj for prev_offset, delta_type, delta in reversed(delta_stack): chunks = apply_delta(chunks, delta) # TODO(dborowitz): This can result in poor performance if # large base objects are separated from deltas in the pack. # We should reorganize so that we apply deltas to all # objects in a chain one after the other to optimize cache # performance. 
if prev_offset is not None: self._offset_cache[prev_offset] = base_type, chunks return base_type, chunks def iterobjects(self, progress=None, compute_crc32=True): self._file.seek(self._header_size) for i in range(1, self._num_objects + 1): offset = self._file.tell() unpacked, unused = unpack_object( self._file.read, compute_crc32=compute_crc32 ) if progress is not None: progress(i, self._num_objects) - yield (offset, unpacked.pack_type_num, unpacked._obj(), unpacked.crc32) + yield ( + offset, + unpacked.pack_type_num, + unpacked._obj(), + unpacked.crc32, + ) # Back up over unused data. self._file.seek(-len(unused), SEEK_CUR) def _iter_unpacked(self): # TODO(dborowitz): Merge this with iterobjects, if we can change its # return type. self._file.seek(self._header_size) if self._num_objects is None: return for _ in range(self._num_objects): offset = self._file.tell() unpacked, unused = unpack_object(self._file.read, compute_crc32=False) unpacked.offset = offset yield unpacked # Back up over unused data. self._file.seek(-len(unused), SEEK_CUR) def iterentries(self, progress=None): """Yield entries summarizing the contents of this pack. Args: progress: Progress function, called with current and total object count. Returns: iterator of tuples with (sha, offset, crc32) """ num_objects = self._num_objects resolve_ext_ref = self.pack.resolve_ext_ref if self.pack is not None else None indexer = PackIndexer.for_pack_data(self, resolve_ext_ref=resolve_ext_ref) for i, result in enumerate(indexer): if progress is not None: progress(i, num_objects) yield result def sorted_entries(self, progress=None): """Return entries in this pack, sorted by SHA. Args: progress: Progress function, called with current and total object count Returns: List of tuples with (sha, offset, crc32) """ ret = sorted(self.iterentries(progress=progress)) return ret def create_index_v1(self, filename, progress=None): """Create a version 1 file for this data file. Args: filename: Index filename. progress: Progress report function Returns: Checksum of index file """ entries = self.sorted_entries(progress=progress) with GitFile(filename, "wb") as f: return write_pack_index_v1(f, entries, self.calculate_checksum()) def create_index_v2(self, filename, progress=None): """Create a version 2 index file for this data file. Args: filename: Index filename. progress: Progress report function Returns: Checksum of index file """ entries = self.sorted_entries(progress=progress) with GitFile(filename, "wb") as f: return write_pack_index_v2(f, entries, self.calculate_checksum()) def create_index(self, filename, progress=None, version=2): """Create an index file for this data file. Args: filename: Index filename. progress: Progress report function Returns: Checksum of index file """ if version == 1: return self.create_index_v1(filename, progress) elif version == 2: return self.create_index_v2(filename, progress) else: raise ValueError("unknown index format %d" % version) def get_stored_checksum(self): """Return the expected checksum stored in this pack.""" self._file.seek(-20, SEEK_END) return self._file.read(20) def check(self): """Check the consistency of this pack.""" actual = self.calculate_checksum() stored = self.get_stored_checksum() if actual != stored: raise ChecksumMismatch(stored, actual) def get_compressed_data_at(self, offset): """Given offset in the packfile return compressed data that is there. 
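The data is returned still compressed, as a (pack type number, delta base, compressed chunks) tuple.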
Using the associated index the location of an object can be looked up, and then the packfile can be asked directly for that object using this function. """ assert offset >= self._header_size self._file.seek(offset) unpacked, _ = unpack_object(self._file.read, include_comp=True) - return (unpacked.pack_type_num, unpacked.delta_base, unpacked.comp_chunks) + return ( + unpacked.pack_type_num, + unpacked.delta_base, + unpacked.comp_chunks, + ) def get_object_at(self, offset): """Given an offset in to the packfile return the object that is there. Using the associated index the location of an object can be looked up, and then the packfile can be asked directly for that object using this function. """ try: return self._offset_cache[offset] except KeyError: pass assert offset >= self._header_size self._file.seek(offset) unpacked, _ = unpack_object(self._file.read) return (unpacked.pack_type_num, unpacked._obj()) class DeltaChainIterator(object): """Abstract iterator over pack data based on delta chains. Each object in the pack is guaranteed to be inflated exactly once, regardless of how many objects reference it as a delta base. As a result, memory usage is proportional to the length of the longest delta chain. Subclasses can override _result to define the result type of the iterator. By default, results are UnpackedObjects with the following members set: * offset * obj_type_num * obj_chunks * pack_type_num * delta_base (for delta types) * comp_chunks (if _include_comp is True) * decomp_chunks * decomp_len * crc32 (if _compute_crc32 is True) """ _compute_crc32 = False _include_comp = False def __init__(self, file_obj, resolve_ext_ref=None): self._file = file_obj self._resolve_ext_ref = resolve_ext_ref self._pending_ofs = defaultdict(list) self._pending_ref = defaultdict(list) self._full_ofs = [] self._shas = {} self._ext_refs = [] @classmethod def for_pack_data(cls, pack_data, resolve_ext_ref=None): walker = cls(None, resolve_ext_ref=resolve_ext_ref) walker.set_pack_data(pack_data) for unpacked in pack_data._iter_unpacked(): walker.record(unpacked) return walker def record(self, unpacked): type_num = unpacked.pack_type_num offset = unpacked.offset if type_num == OFS_DELTA: base_offset = offset - unpacked.delta_base self._pending_ofs[base_offset].append(offset) elif type_num == REF_DELTA: self._pending_ref[unpacked.delta_base].append(offset) else: self._full_ofs.append((offset, type_num)) def set_pack_data(self, pack_data): self._file = pack_data._file def _walk_all_chains(self): for offset, type_num in self._full_ofs: for result in self._follow_chain(offset, type_num, None): yield result for result in self._walk_ref_chains(): yield result assert not self._pending_ofs def _ensure_no_pending(self): if self._pending_ref: raise KeyError([sha_to_hex(s) for s in self._pending_ref]) def _walk_ref_chains(self): if not self._resolve_ext_ref: self._ensure_no_pending() return for base_sha, pending in sorted(self._pending_ref.items()): if base_sha not in self._pending_ref: continue try: type_num, chunks = self._resolve_ext_ref(base_sha) except KeyError: # Not an external ref, but may depend on one. Either it will # get popped via a _follow_chain call, or we will raise an # error below. 
continue self._ext_refs.append(base_sha) self._pending_ref.pop(base_sha) for new_offset in pending: for result in self._follow_chain(new_offset, type_num, chunks): yield result self._ensure_no_pending() def _result(self, unpacked): return unpacked def _resolve_object(self, offset, obj_type_num, base_chunks): self._file.seek(offset) unpacked, _ = unpack_object( self._file.read, include_comp=self._include_comp, compute_crc32=self._compute_crc32, ) unpacked.offset = offset if base_chunks is None: assert unpacked.pack_type_num == obj_type_num else: assert unpacked.pack_type_num in DELTA_TYPES unpacked.obj_type_num = obj_type_num unpacked.obj_chunks = apply_delta(base_chunks, unpacked.decomp_chunks) return unpacked def _follow_chain(self, offset, obj_type_num, base_chunks): # Unlike PackData.get_object_at, there is no need to cache offsets as # this approach by design inflates each object exactly once. todo = [(offset, obj_type_num, base_chunks)] for offset, obj_type_num, base_chunks in todo: unpacked = self._resolve_object(offset, obj_type_num, base_chunks) yield self._result(unpacked) unblocked = chain( self._pending_ofs.pop(unpacked.offset, []), self._pending_ref.pop(unpacked.sha(), []), ) todo.extend( (new_offset, unpacked.obj_type_num, unpacked.obj_chunks) for new_offset in unblocked ) def __iter__(self): return self._walk_all_chains() def ext_refs(self): return self._ext_refs class PackIndexer(DeltaChainIterator): """Delta chain iterator that yields index entries.""" _compute_crc32 = True def _result(self, unpacked): return unpacked.sha(), unpacked.offset, unpacked.crc32 class PackInflater(DeltaChainIterator): """Delta chain iterator that yields ShaFile objects.""" def _result(self, unpacked): return unpacked.sha_file() class SHA1Reader(object): """Wrapper for file-like object that remembers the SHA1 of its data.""" def __init__(self, f): self.f = f self.sha1 = sha1(b"") def read(self, num=None): data = self.f.read(num) self.sha1.update(data) return data def check_sha(self): stored = self.f.read(20) if stored != self.sha1.digest(): raise ChecksumMismatch(self.sha1.hexdigest(), sha_to_hex(stored)) def close(self): return self.f.close() def tell(self): return self.f.tell() class SHA1Writer(object): """Wrapper for file-like object that remembers the SHA1 of its data.""" def __init__(self, f): self.f = f self.length = 0 self.sha1 = sha1(b"") def write(self, data): self.sha1.update(data) self.f.write(data) self.length += len(data) def write_sha(self): sha = self.sha1.digest() assert len(sha) == 20 self.f.write(sha) self.length += len(sha) return sha def close(self): sha = self.write_sha() self.f.close() return sha def offset(self): return self.length def tell(self): return self.f.tell() def pack_object_header(type_num, delta_base, size): """Create a pack object header for the given object info. Args: type_num: Numeric type of the object. delta_base: Delta base offset or ref, or None for whole objects. size: Uncompressed object size. Returns: A header for a packed object. """ header = [] c = (type_num << 4) | (size & 15) size >>= 4 while size: header.append(c | 0x80) c = size & 0x7F size >>= 7 header.append(c) if type_num == OFS_DELTA: ret = [delta_base & 0x7F] delta_base >>= 7 while delta_base: delta_base -= 1 ret.insert(0, 0x80 | (delta_base & 0x7F)) delta_base >>= 7 header.extend(ret) elif type_num == REF_DELTA: assert len(delta_base) == 20 header += delta_base return bytearray(header) def write_pack_object(f, type, object, sha=None, compression_level=-1): """Write pack object to a file. 
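The variable-length object header (see pack_object_header) is written first, followed by the zlib-deflated payload; the returned CRC32 covers both.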
Args: f: File to write to type: Numeric type of the object object: Object to write compression_level: the zlib compression level Returns: Tuple with offset at which the object was written, and crc32 """ if type in DELTA_TYPES: delta_base, object = object else: delta_base = None header = bytes(pack_object_header(type, delta_base, len(object))) comp_data = zlib.compress(object, compression_level) crc32 = 0 for data in (header, comp_data): f.write(data) if sha is not None: sha.update(data) crc32 = binascii.crc32(data, crc32) return crc32 & 0xFFFFFFFF def write_pack( - filename, objects, deltify=None, delta_window_size=None, compression_level=-1 + filename, + objects, + deltify=None, + delta_window_size=None, + compression_level=-1, ): """Write a new pack data file. Args: filename: Path to the new pack file (without .pack extension) objects: Iterable of (object, path) tuples to write. Should provide __len__ window_size: Delta window size deltify: Whether to deltify pack objects compression_level: the zlib compression level Returns: Tuple with checksum of pack file and index file """ with GitFile(filename + ".pack", "wb") as f: entries, data_sum = write_pack_objects( f, objects, delta_window_size=delta_window_size, deltify=deltify, compression_level=compression_level, ) entries = sorted([(k, v[0], v[1]) for (k, v) in entries.items()]) with GitFile(filename + ".idx", "wb") as f: return data_sum, write_pack_index_v2(f, entries, data_sum) def write_pack_header(f, num_objects): """Write a pack header for the given number of objects.""" f.write(b"PACK") # Pack header f.write(struct.pack(b">L", 2)) # Pack version f.write(struct.pack(b">L", num_objects)) # Number of objects in pack def deltify_pack_objects(objects, window_size=None): """Generate deltas for pack objects. Args: objects: An iterable of (object, path) tuples to deltify. window_size: Window size; None for default Returns: Iterator over type_num, object id, delta_base, content delta_base is None for full text entries """ # TODO(jelmer): Use threads if window_size is None: window_size = DEFAULT_PACK_DELTA_WINDOW_SIZE # Build a list of objects ordered by the magic Linus heuristic # This helps us find good objects to diff against us magic = [] for obj, path in objects: magic.append((obj.type_num, path, -obj.raw_length(), obj)) magic.sort() possible_bases = deque() for type_num, path, neg_length, o in magic: raw = o.as_raw_string() winner = raw winner_base = None for base in possible_bases: if base.type_num != type_num: continue delta = create_delta(base.as_raw_string(), raw) if len(delta) < len(winner): winner_base = base.sha().digest() winner = delta yield type_num, o.sha().digest(), winner_base, winner possible_bases.appendleft(o) while len(possible_bases) > window_size: possible_bases.pop() def pack_objects_to_data(objects): """Create pack data from objects Args: objects: Pack objects Returns: Tuples with (type_num, hexdigest, delta base, object chunks) """ count = len(objects) return ( count, ( (o.type_num, o.sha().digest(), None, o.as_raw_string()) for (o, path) in objects ), ) def write_pack_objects( f, objects, delta_window_size=None, deltify=None, compression_level=-1 ): """Write a new pack data file. Args: f: File to write to objects: Iterable of (object, path) tuples to write. Should provide __len__ window_size: Sliding window size for searching for deltas; Set to None for default window size. 
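# A minimal usage sketch for write_pack (paths illustrative): it writes
# "<basename>.pack" plus the matching "<basename>.idx" and returns both
# checksums.
from dulwich.objects import Blob
from dulwich.pack import write_pack

blob = Blob.from_string(b"hello world")
# objects must be (object, path) tuples in a container providing __len__
data_sum, idx_sum = write_pack("/tmp/example", [(blob, None)])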
deltify: Whether to deltify objects compression_level: the zlib compression level to use Returns: Dict mapping id -> (offset, crc32 checksum), pack checksum """ if deltify is None: # PERFORMANCE/TODO(jelmer): This should be enabled but is *much* too # slow at the moment. deltify = False if deltify: pack_contents = deltify_pack_objects(objects, delta_window_size) pack_contents_count = len(objects) else: pack_contents_count, pack_contents = pack_objects_to_data(objects) return write_pack_data( - f, pack_contents_count, pack_contents, compression_level=compression_level + f, + pack_contents_count, + pack_contents, + compression_level=compression_level, ) def write_pack_data(f, num_records, records, progress=None, compression_level=-1): """Write a new pack data file. Args: f: File to write to num_records: Number of records records: Iterator over type_num, object_id, delta_base, raw progress: Function to report progress to compression_level: the zlib compression level Returns: Dict mapping id -> (offset, crc32 checksum), pack checksum """ # Write the pack entries = {} f = SHA1Writer(f) write_pack_header(f, num_records) for i, (type_num, object_id, delta_base, raw) in enumerate(records): if progress is not None: progress(("writing pack data: %d/%d\r" % (i, num_records)).encode("ascii")) offset = f.offset() if delta_base is not None: try: base_offset, base_crc32 = entries[delta_base] except KeyError: type_num = REF_DELTA raw = (delta_base, raw) else: type_num = OFS_DELTA raw = (offset - base_offset, raw) crc32 = write_pack_object(f, type_num, raw, compression_level=compression_level) entries[object_id] = (offset, crc32) return entries, f.write_sha() def write_pack_index_v1(f, entries, pack_checksum): """Write a new pack index file. Args: f: A file-like object to write to entries: List of tuples with object name (sha), offset_in_pack, and crc32_checksum. pack_checksum: Checksum of the pack file. Returns: The SHA of the written index file """ f = SHA1Writer(f) fan_out_table = defaultdict(lambda: 0) for (name, offset, entry_checksum) in entries: fan_out_table[ord(name[:1])] += 1 # Fan-out table for i in range(0x100): f.write(struct.pack(">L", fan_out_table[i])) fan_out_table[i + 1] += fan_out_table[i] for (name, offset, entry_checksum) in entries: if not (offset <= 0xFFFFFFFF): raise TypeError("pack format 1 only supports offsets < 2Gb") f.write(struct.pack(">L20s", offset, name)) assert len(pack_checksum) == 20 f.write(pack_checksum) return f.write_sha() def _delta_encode_size(size): ret = bytearray() c = size & 0x7F size >>= 7 while size: ret.append(c | 0x80) c = size & 0x7F size >>= 7 ret.append(c) return ret # The length of delta compression copy operations in version 2 packs is limited # to 64K. To copy more, we use several copy operations. Version 3 packs allow # 24-bit lengths in copy operations, but we always make version 2 packs. _MAX_COPY_LEN = 0xFFFF def _encode_copy_operation(start, length): scratch = [] op = 0x80 for i in range(4): if start & 0xFF << i * 8: scratch.append((start >> i * 8) & 0xFF) op |= 1 << i for i in range(2): if length & 0xFF << i * 8: scratch.append((length >> i * 8) & 0xFF) op |= 1 << (4 + i) return bytearray([op] + scratch) def create_delta(base_buf, target_buf): """Use python difflib to work out how to transform base_buf to target_buf. 
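# Worked examples for the two internal delta-encoding helpers defined above
# (they are private, shown purely for illustration). _delta_encode_size uses
# little-endian 7-bit groups with a continuation bit; _encode_copy_operation
# sets bit 7 of the opcode plus one flag bit per non-zero offset/length byte.
from dulwich.pack import _delta_encode_size, _encode_copy_operation

assert bytes(_delta_encode_size(200)) == bytes([0xC8, 0x01])
# copy 0x20 bytes starting at source offset 0x1000:
assert bytes(_encode_copy_operation(0x1000, 0x20)) == bytes([0x92, 0x10, 0x20])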
Args: base_buf: Base buffer target_buf: Target buffer """ assert isinstance(base_buf, bytes) assert isinstance(target_buf, bytes) out_buf = bytearray() # write delta header out_buf += _delta_encode_size(len(base_buf)) out_buf += _delta_encode_size(len(target_buf)) # write out delta opcodes seq = difflib.SequenceMatcher(a=base_buf, b=target_buf) for opcode, i1, i2, j1, j2 in seq.get_opcodes(): # Git patch opcodes don't care about deletes! # if opcode == 'replace' or opcode == 'delete': # pass if opcode == "equal": # If they are equal, unpacker will use data from base_buf # Write out an opcode that says what range to use copy_start = i1 copy_len = i2 - i1 while copy_len > 0: to_copy = min(copy_len, _MAX_COPY_LEN) out_buf += _encode_copy_operation(copy_start, to_copy) copy_start += to_copy copy_len -= to_copy if opcode == "replace" or opcode == "insert": # If we are replacing a range or adding one, then we just # output it to the stream (prefixed by its size) s = j2 - j1 o = j1 while s > 127: out_buf.append(127) out_buf += bytearray(target_buf[o : o + 127]) s -= 127 o += 127 out_buf.append(s) out_buf += bytearray(target_buf[o : o + s]) return bytes(out_buf) def apply_delta(src_buf, delta): """Based on the similar function in git's patch-delta.c. Args: src_buf: Source buffer delta: Delta instructions """ if not isinstance(src_buf, bytes): src_buf = b"".join(src_buf) if not isinstance(delta, bytes): delta = b"".join(delta) out = [] index = 0 delta_length = len(delta) def get_delta_header_size(delta, index): size = 0 i = 0 while delta: cmd = ord(delta[index : index + 1]) index += 1 size |= (cmd & ~0x80) << i i += 7 if not cmd & 0x80: break return size, index src_size, index = get_delta_header_size(delta, index) dest_size, index = get_delta_header_size(delta, index) assert src_size == len(src_buf), "%d vs %d" % (src_size, len(src_buf)) while index < delta_length: cmd = ord(delta[index : index + 1]) index += 1 if cmd & 0x80: cp_off = 0 for i in range(4): if cmd & (1 << i): x = ord(delta[index : index + 1]) index += 1 cp_off |= x << (i * 8) cp_size = 0 # Version 3 packs can contain copy sizes larger than 64K. for i in range(3): if cmd & (1 << (4 + i)): x = ord(delta[index : index + 1]) index += 1 cp_size |= x << (i * 8) if cp_size == 0: cp_size = 0x10000 if ( cp_off + cp_size < cp_size or cp_off + cp_size > src_size or cp_size > dest_size ): break out.append(src_buf[cp_off : cp_off + cp_size]) elif cmd != 0: out.append(delta[index : index + cmd]) index += cmd else: raise ApplyDeltaError("Invalid opcode 0") if index != delta_length: raise ApplyDeltaError("delta not empty: %r" % delta[index:]) if dest_size != chunks_length(out): raise ApplyDeltaError("dest size incorrect") return out def write_pack_index_v2(f, entries, pack_checksum): """Write a new pack index file. Args: f: File-like object to write to entries: List of tuples with object name (sha), offset_in_pack, and crc32_checksum. pack_checksum: Checksum of the pack file. Returns: The SHA of the index file written """ f = SHA1Writer(f) f.write(b"\377tOc") # Magic! 
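# A round-trip sketch for create_delta/apply_delta above: a delta produced
# from (base, target) and re-applied to base yields the target chunks.
from dulwich.pack import apply_delta, create_delta

base = b"the quick brown fox jumps over the lazy dog"
target = b"the quick brown fox jumps over the lazy cat"
delta = create_delta(base, target)
assert b"".join(apply_delta(base, delta)) == target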
f.write(struct.pack(">L", 2)) fan_out_table = defaultdict(lambda: 0) for (name, offset, entry_checksum) in entries: fan_out_table[ord(name[:1])] += 1 # Fan-out table largetable = [] for i in range(0x100): f.write(struct.pack(b">L", fan_out_table[i])) fan_out_table[i + 1] += fan_out_table[i] for (name, offset, entry_checksum) in entries: f.write(name) for (name, offset, entry_checksum) in entries: f.write(struct.pack(b">L", entry_checksum)) for (name, offset, entry_checksum) in entries: if offset < 2 ** 31: f.write(struct.pack(b">L", offset)) else: f.write(struct.pack(b">L", 2 ** 31 + len(largetable))) largetable.append(offset) for offset in largetable: f.write(struct.pack(b">Q", offset)) assert len(pack_checksum) == 20 f.write(pack_checksum) return f.write_sha() write_pack_index = write_pack_index_v2 class Pack(object): """A Git pack object.""" def __init__(self, basename, resolve_ext_ref=None): self._basename = basename self._data = None self._idx = None self._idx_path = self._basename + ".idx" self._data_path = self._basename + ".pack" self._data_load = lambda: PackData(self._data_path) self._idx_load = lambda: load_pack_index(self._idx_path) self.resolve_ext_ref = resolve_ext_ref @classmethod def from_lazy_objects(cls, data_fn, idx_fn): """Create a new pack object from callables to load pack data and index objects.""" ret = cls("") ret._data_load = data_fn ret._idx_load = idx_fn return ret @classmethod def from_objects(cls, data, idx): """Create a new pack object from pack data and index objects.""" ret = cls("") ret._data = data ret._data.pack = ret ret._data_load = None ret._idx = idx ret._idx_load = None ret.check_length_and_checksum() return ret def name(self): """The SHA over the SHAs of the objects in this pack.""" return self.index.objects_sha1() @property def data(self): """The pack data object being used.""" if self._data is None: self._data = self._data_load() self._data.pack = self self.check_length_and_checksum() return self._data @property def index(self): """The index being used. Note: This may be an in-memory index """ if self._idx is None: self._idx = self._idx_load() return self._idx def close(self): if self._data is not None: self._data.close() if self._idx is not None: self._idx.close() def __enter__(self): return self def __exit__(self, exc_type, exc_val, exc_tb): self.close() def __eq__(self, other): return isinstance(self, type(other)) and self.index == other.index def __len__(self): """Number of entries in this pack.""" return len(self.index) def __repr__(self): return "%s(%r)" % (self.__class__.__name__, self._basename) def __iter__(self): """Iterate over all the sha1s of the objects in this pack.""" return iter(self.index) def check_length_and_checksum(self): """Sanity check the length and checksum of the pack index and data.""" assert len(self.index) == len(self.data) idx_stored_checksum = self.index.get_pack_checksum() data_stored_checksum = self.data.get_stored_checksum() if idx_stored_checksum != data_stored_checksum: raise ChecksumMismatch( - sha_to_hex(idx_stored_checksum), sha_to_hex(data_stored_checksum) + sha_to_hex(idx_stored_checksum), + sha_to_hex(data_stored_checksum), ) def check(self): """Check the integrity of this pack. 
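# A hedged usage sketch for the Pack class ("pack-abc" is an illustrative
# basename): the .pack and .idx files are opened lazily on first use.
from dulwich.pack import Pack

with Pack("/path/to/pack-abc") as p:
    print(len(p))          # number of objects, from the index
    for sha in p:          # hex SHA1s of all objects in the pack
        obj = p[sha]       # inflate to a ShaFile (resolving any deltas)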
Raises: ChecksumMismatch: if a checksum for the index or data is wrong """ self.index.check() self.data.check() for obj in self.iterobjects(): obj.check() # TODO: object connectivity checks def get_stored_checksum(self): return self.data.get_stored_checksum() def __contains__(self, sha1): """Check whether this pack contains a particular SHA1.""" try: self.index.object_index(sha1) return True except KeyError: return False def get_raw_unresolved(self, sha1): """Get raw unresolved data for a SHA. Args: sha1: SHA to return data for Returns: Tuple with pack object type, delta base (if applicable), list of data chunks """ offset = self.index.object_index(sha1) (obj_type, delta_base, chunks) = self.data.get_compressed_data_at(offset) if obj_type == OFS_DELTA: delta_base = sha_to_hex(self.index.object_sha1(offset - delta_base)) obj_type = REF_DELTA return (obj_type, delta_base, chunks) def get_raw(self, sha1): offset = self.index.object_index(sha1) obj_type, obj = self.data.get_object_at(offset) type_num, chunks = self.data.resolve_object(offset, obj_type, obj) return type_num, b"".join(chunks) def __getitem__(self, sha1): """Retrieve the specified SHA1.""" type, uncomp = self.get_raw(sha1) return ShaFile.from_raw_string(type, uncomp, sha=sha1) def iterobjects(self): """Iterate over the objects in this pack.""" return iter( PackInflater.for_pack_data(self.data, resolve_ext_ref=self.resolve_ext_ref) ) def pack_tuples(self): """Provide an iterable for use with write_pack_objects. Returns: Object that can iterate over (object, path) tuples and provides __len__ """ class PackTupleIterable(object): def __init__(self, pack): self.pack = pack def __len__(self): return len(self.pack) def __iter__(self): return ((o, None) for o in self.pack.iterobjects()) return PackTupleIterable(self) def keep(self, msg=None): """Add a .keep file for the pack, preventing git from garbage collecting it. Args: msg: A message written inside the .keep file; can be used later to determine whether or not a .keep file is obsolete. Returns: The path of the .keep file, as a string. """ keepfile_name = "%s.keep" % self._basename with GitFile(keepfile_name, "wb") as keepfile: if msg: keepfile.write(msg) keepfile.write(b"\n") return keepfile_name try: from dulwich._pack import ( # type: ignore # noqa: F811 apply_delta, bisect_find_sha, ) except ImportError: pass diff --git a/dulwich/porcelain.py b/dulwich/porcelain.py index 85cd7d98..e5627ee3 100644 --- a/dulwich/porcelain.py +++ b/dulwich/porcelain.py @@ -1,1858 +1,1878 @@ # porcelain.py -- Porcelain-like layer on top of Dulwich # Copyright (C) 2013 Jelmer Vernooij # # Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU # General Public License as public by the Free Software Foundation; version 2.0 # or (at your option) any later version. You can redistribute it and/or # modify it under the terms of either of these two licenses. # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # You should have received a copy of the licenses; if not, see # for a copy of the GNU General Public License # and for a copy of the Apache # License, Version 2.0. # """Simple wrapper that provides porcelain-like functions on top of Dulwich. 
Currently implemented: * archive * add * branch{_create,_delete,_list} * check-ignore * checkout * clone * commit * commit-tree * daemon * describe * diff-tree * fetch * init * ls-files * ls-remote * ls-tree * pull * push * rm * remote{_add} * receive-pack * reset * rev-list * tag{_create,_delete,_list} * upload-pack * update-server-info * status * symbolic-ref These functions are meant to behave similarly to the git subcommands. Differences in behaviour are considered bugs. Functions should generally accept both unicode strings and bytestrings """ from collections import namedtuple from contextlib import ( closing, contextmanager, ) from io import BytesIO, RawIOBase import datetime import os from pathlib import Path import posixpath import shutil import stat import sys import time from typing import ( Dict, Optional, Tuple, Union, ) from dulwich.archive import ( tar_stream, ) from dulwich.client import ( get_transport_and_path, ) from dulwich.config import ( StackedConfig, ) from dulwich.diff_tree import ( CHANGE_ADD, CHANGE_DELETE, CHANGE_MODIFY, CHANGE_RENAME, CHANGE_COPY, RENAME_CHANGE_TYPES, ) from dulwich.errors import ( SendPackError, ) from dulwich.graph import ( can_fast_forward, ) from dulwich.ignore import IgnoreFilterManager from dulwich.index import ( blob_from_path_and_stat, get_unstaged_changes, ) from dulwich.object_store import ( tree_lookup_path, ) from dulwich.objects import ( Commit, Tag, format_timezone, parse_timezone, pretty_format_tree_entry, ) from dulwich.objectspec import ( parse_commit, parse_object, parse_ref, parse_reftuples, parse_tree, ) from dulwich.pack import ( write_pack_index, write_pack_objects, ) from dulwich.patch import write_tree_diff from dulwich.protocol import ( Protocol, ZERO_SHA, ) from dulwich.refs import ( ANNOTATED_TAG_SUFFIX, LOCAL_BRANCH_PREFIX, strip_peeled_refs, RefsContainer, ) from dulwich.repo import BaseRepo, Repo from dulwich.server import ( FileSystemBackend, TCPGitServer, ReceivePackHandler, UploadPackHandler, update_server_info as server_update_server_info, ) # Module level tuple definition for status output GitStatus = namedtuple("GitStatus", "staged unstaged untracked") class NoneStream(RawIOBase): """Fallback if stdout or stderr are unavailable, does nothing.""" def read(self, size=-1): return None def readall(self): return None def readinto(self, b): return None def write(self, b): return None default_bytes_out_stream = getattr(sys.stdout, "buffer", None) or NoneStream() default_bytes_err_stream = getattr(sys.stderr, "buffer", None) or NoneStream() DEFAULT_ENCODING = "utf-8" class Error(Exception): """Porcelain-based error. """ def __init__(self, msg, inner=None): super(Error, self).__init__(msg) self.inner = inner class RemoteExists(Error): """Raised when the remote already exists.""" def open_repo(path_or_repo): """Open an argument that can be a repository or a path for a repository.""" if isinstance(path_or_repo, BaseRepo): return path_or_repo return Repo(path_or_repo) @contextmanager def _noop_context_manager(obj): """Context manager that has the same api as closing but does nothing.""" yield obj def open_repo_closing(path_or_repo): """Open an argument that can be a repository or a path for a repository. returns a context manager that will close the repo on exit if the argument is a path, else does nothing if the argument is a repo. 
""" if isinstance(path_or_repo, BaseRepo): return _noop_context_manager(path_or_repo) return closing(Repo(path_or_repo)) def path_to_tree_path(repopath, path, tree_encoding=DEFAULT_ENCODING): """Convert a path to a path usable in an index, e.g. bytes and relative to the repository root. Args: repopath: Repository path, absolute or relative to the cwd path: A path, absolute or relative to the cwd Returns: A path formatted for use in e.g. an index """ # Pathlib resolve before Python 3.6 could raises FileNotFoundError in case # there is no file matching the path so we reuse the old implementation for # Python 3.5 if sys.version_info < (3, 6): if not isinstance(path, bytes): path = os.fsencode(path) if not isinstance(repopath, bytes): repopath = os.fsencode(repopath) treepath = os.path.relpath(path, repopath) if treepath.startswith(b".."): err_msg = "Path %r not in repo path (%r)" % (path, repopath) raise ValueError(err_msg) if os.path.sep != "/": treepath = treepath.replace(os.path.sep.encode("ascii"), b"/") return treepath else: # Resolve might returns a relative path on Windows # https://bugs.python.org/issue38671 if sys.platform == "win32": path = os.path.abspath(path) path = Path(path).resolve() # Resolve and abspath seems to behave differently regarding symlinks, # as we are doing abspath on the file path, we need to do the same on # the repo path or they might not match if sys.platform == "win32": repopath = os.path.abspath(repopath) repopath = Path(repopath).resolve() relpath = path.relative_to(repopath) if sys.platform == "win32": return str(relpath).replace(os.path.sep, "/").encode(tree_encoding) else: return bytes(relpath) class DivergedBranches(Error): """Branches have diverged and fast-forward is not possible.""" def check_diverged(repo, current_sha, new_sha): """Check if updating to a sha can be done with fast forwarding. Args: repo: Repository object current_sha: Current head sha new_sha: New head sha """ try: can = can_fast_forward(repo, current_sha, new_sha) except KeyError: can = False if not can: raise DivergedBranches(current_sha, new_sha) def archive( repo, committish=None, outstream=default_bytes_out_stream, errstream=default_bytes_err_stream, ): """Create an archive. Args: repo: Path of repository for which to generate an archive. committish: Commit SHA1 or ref to use outstream: Output stream (defaults to stdout) errstream: Error stream (defaults to stderr) """ if committish is None: committish = "HEAD" with open_repo_closing(repo) as repo_obj: c = parse_commit(repo_obj, committish) for chunk in tar_stream( repo_obj.object_store, repo_obj.object_store[c.tree], c.commit_time ): outstream.write(chunk) def update_server_info(repo="."): """Update server info files for a repository. Args: repo: path to the repository """ with open_repo_closing(repo) as r: server_update_server_info(r) def symbolic_ref(repo, ref_name, force=False): """Set git symbolic ref into HEAD. Args: repo: path to the repository ref_name: short name of the new ref force: force settings without checking if it exists in refs/heads """ with open_repo_closing(repo) as repo_obj: ref_path = _make_branch_ref(ref_name) if not force and ref_path not in repo_obj.refs.keys(): raise Error("fatal: ref `%s` is not a ref" % ref_name) repo_obj.refs.set_symbolic_ref(b"HEAD", ref_path) def commit( - repo=".", message=None, author=None, committer=None, encoding=None, no_verify=False + repo=".", + message=None, + author=None, + committer=None, + encoding=None, + no_verify=False, ): """Create a new commit. 
Args: repo: Path to repository message: Optional commit message author: Optional author name and email committer: Optional committer name and email no_verify: Skip pre-commit and commit-msg hooks Returns: SHA1 of the new commit """ # FIXME: Support --all argument # FIXME: Support --signoff argument if getattr(message, "encode", None): message = message.encode(encoding or DEFAULT_ENCODING) if getattr(author, "encode", None): author = author.encode(encoding or DEFAULT_ENCODING) if getattr(committer, "encode", None): committer = committer.encode(encoding or DEFAULT_ENCODING) with open_repo_closing(repo) as r: return r.do_commit( message=message, author=author, committer=committer, encoding=encoding, no_verify=no_verify, ) def commit_tree(repo, tree, message=None, author=None, committer=None): """Create a new commit object. Args: repo: Path to repository tree: An existing tree object author: Optional author name and email committer: Optional committer name and email """ with open_repo_closing(repo) as r: return r.do_commit( message=message, tree=tree, committer=committer, author=author ) def init(path=".", bare=False): """Create a new git repository. Args: path: Path to repository. bare: Whether to create a bare repository. Returns: A Repo instance """ if not os.path.exists(path): os.mkdir(path) if bare: return Repo.init_bare(path) else: return Repo.init(path) def clone( source, target=None, bare=False, checkout=None, errstream=default_bytes_err_stream, outstream=None, origin=b"origin", depth=None, **kwargs ): """Clone a local or remote git repository. Args: source: Path or URL for source repository target: Path to target repository (optional) bare: Whether or not to create a bare repository checkout: Whether or not to check-out HEAD after cloning errstream: Optional stream to write progress to outstream: Optional stream to write progress to (deprecated) origin: Name of remote from the repository used to clone depth: Depth to fetch at Returns: The new repository """ # TODO(jelmer): This code overlaps quite a bit with Repo.clone if outstream is not None: import warnings warnings.warn( "outstream= has been deprecated in favour of errstream=.", DeprecationWarning, stacklevel=3, ) errstream = outstream if checkout is None: checkout = not bare if checkout and bare: raise Error("checkout and bare are incompatible") if target is None: target = source.split("/")[-1] if not os.path.exists(target): os.mkdir(target) if bare: r = Repo.init_bare(target) else: r = Repo.init(target) reflog_message = b"clone: from " + source.encode("utf-8") try: target_config = r.get_config() if not isinstance(source, bytes): source = source.encode(DEFAULT_ENCODING) target_config.set((b"remote", origin), b"url", source) target_config.set( (b"remote", origin), b"fetch", b"+refs/heads/*:refs/remotes/" + origin + b"/*", ) target_config.write_to_path() fetch_result = fetch( r, origin, errstream=errstream, message=reflog_message, depth=depth, **kwargs ) # TODO(jelmer): Support symref capability, # https://github.com/jelmer/dulwich/issues/485 try: head = r[fetch_result.refs[b"HEAD"]] except KeyError: head = None else: r[b"HEAD"] = head.id if checkout and not bare and head is not None: errstream.write(b"Checking out " + head.id + b"\n") r.reset_index(head.tree) except BaseException: shutil.rmtree(target) r.close() raise return r def add(repo=".", paths=None): """Add files to the staging area. Args: repo: Repository for the files paths: Paths to add. No value passed stages all modified files. 
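# A minimal clone sketch (URL and target path are illustrative; this needs
# network access) returning the new Repo:
from dulwich import porcelain

repo = porcelain.clone("https://github.com/jelmer/dulwich", "/tmp/dulwich")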
Returns: Tuple with set of added files and ignored files """ ignored = set() with open_repo_closing(repo) as r: repo_path = Path(r.path).resolve() ignore_manager = IgnoreFilterManager.from_repo(r) if not paths: paths = list( get_untracked_paths( - str(Path(os.getcwd()).resolve()), str(repo_path), r.open_index() + str(Path(os.getcwd()).resolve()), + str(repo_path), + r.open_index(), ) ) relpaths = [] if not isinstance(paths, list): paths = [paths] for p in paths: relpath = str(Path(p).resolve().relative_to(repo_path)) # FIXME: Support patterns, directories. if ignore_manager.is_ignored(relpath): ignored.add(relpath) continue relpaths.append(relpath) r.stage(relpaths) return (relpaths, ignored) def _is_subdir(subdir, parentdir): """Check whether subdir is parentdir or a subdir of parentdir. If parentdir or subdir is a relative path, it will be disambiguated relative to the pwd. """ parentdir_abs = os.path.realpath(parentdir) + os.path.sep subdir_abs = os.path.realpath(subdir) + os.path.sep return subdir_abs.startswith(parentdir_abs) # TODO: option to remove ignored files also, in line with `git clean -fdx` def clean(repo=".", target_dir=None): """Remove any untracked files from the target directory recursively. Equivalent to running `git clean -fd` in target_dir. Args: repo: Repository where the files may be tracked target_dir: Directory to clean - current directory if None """ if target_dir is None: target_dir = os.getcwd() with open_repo_closing(repo) as r: if not _is_subdir(target_dir, r.path): raise Error("target_dir must be in the repo's working dir") config = r.get_config_stack() require_force = config.get_boolean( # noqa: F841 (b"clean",), b"requireForce", True ) # TODO(jelmer): if require_force is set, then make sure that -f, -i or # -n is specified. index = r.open_index() ignore_manager = IgnoreFilterManager.from_repo(r) paths_in_wd = _walk_working_dir_paths(target_dir, r.path) # Reverse file visit order, so that files and subdirectories are # removed before containing directory for ap, is_dir in reversed(list(paths_in_wd)): if is_dir: # All subdirectories and files have been removed if untracked, # so dir contains no tracked files iff it is empty. is_empty = len(os.listdir(ap)) == 0 if is_empty: os.rmdir(ap) else: ip = path_to_tree_path(r.path, ap) is_tracked = ip in index rp = os.path.relpath(ap, r.path) is_ignored = ignore_manager.is_ignored(rp) if not is_tracked and not is_ignored: os.remove(ap) def remove(repo=".", paths=None, cached=False): """Remove files from the staging area.
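# A hedged sketch of add(): it stages the given paths and returns the staged
# relative paths together with the set of paths skipped as ignored.
from dulwich import porcelain

added, ignored = porcelain.add(".", paths=["README.md"])  # illustrative path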
Args: repo: Repository for the files paths: Paths to remove """ with open_repo_closing(repo) as r: index = r.open_index() for p in paths: full_path = os.fsencode(os.path.abspath(p)) tree_path = path_to_tree_path(r.path, p) try: index_sha = index[tree_path].sha except KeyError: raise Error("%s did not match any files" % p) if not cached: try: st = os.lstat(full_path) except OSError: pass else: try: blob = blob_from_path_and_stat(full_path, st) except IOError: pass else: try: committed_sha = tree_lookup_path( r.__getitem__, r[r.head()].tree, tree_path )[1] except KeyError: committed_sha = None if blob.id != index_sha and index_sha != committed_sha: raise Error( "file has staged content differing " "from both the file and head: %s" % p ) if index_sha != committed_sha: raise Error("file has staged changes: %s" % p) os.remove(full_path) del index[tree_path] index.write() rm = remove def commit_decode(commit, contents, default_encoding=DEFAULT_ENCODING): if commit.encoding: encoding = commit.encoding.decode("ascii") else: encoding = default_encoding return contents.decode(encoding, "replace") def commit_encode(commit, contents, default_encoding=DEFAULT_ENCODING): if commit.encoding: encoding = commit.encoding.decode("ascii") else: encoding = default_encoding return contents.encode(encoding) def print_commit(commit, decode, outstream=sys.stdout): """Write a human-readable commit log entry. Args: commit: A `Commit` object outstream: A stream file to write to """ outstream.write("-" * 50 + "\n") outstream.write("commit: " + commit.id.decode("ascii") + "\n") if len(commit.parents) > 1: outstream.write( "merge: " + "...".join([c.decode("ascii") for c in commit.parents[1:]]) + "\n" ) outstream.write("Author: " + decode(commit.author) + "\n") if commit.author != commit.committer: outstream.write("Committer: " + decode(commit.committer) + "\n") time_tuple = time.gmtime(commit.author_time + commit.author_timezone) time_str = time.strftime("%a %b %d %Y %H:%M:%S", time_tuple) timezone_str = format_timezone(commit.author_timezone).decode("ascii") outstream.write("Date: " + time_str + " " + timezone_str + "\n") outstream.write("\n") outstream.write(decode(commit.message) + "\n") outstream.write("\n") def print_tag(tag, decode, outstream=sys.stdout): """Write a human-readable tag. Args: tag: A `Tag` object decode: Function for decoding bytes to unicode string outstream: A stream to write to """ outstream.write("Tagger: " + decode(tag.tagger) + "\n") time_tuple = time.gmtime(tag.tag_time + tag.tag_timezone) time_str = time.strftime("%a %b %d %Y %H:%M:%S", time_tuple) timezone_str = format_timezone(tag.tag_timezone).decode("ascii") outstream.write("Date: " + time_str + " " + timezone_str + "\n") outstream.write("\n") outstream.write(decode(tag.message) + "\n") outstream.write("\n") def show_blob(repo, blob, decode, outstream=sys.stdout): """Write a blob to a stream. Args: repo: A `Repo` object blob: A `Blob` object decode: Function for decoding bytes to unicode string outstream: A stream file to write to """ outstream.write(decode(blob.data)) def show_commit(repo, commit, decode, outstream=sys.stdout): """Show a commit to a stream. 
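# A hedged sketch of remove(): without cached it deletes the working-tree
# file and unstages it; with cached=True (like `git rm --cached`) only the
# index entry is dropped and the file is left in place.
from dulwich import porcelain

porcelain.remove(".", paths=["old.txt"], cached=True)  # illustrative path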
Args: repo: A `Repo` object commit: A `Commit` object decode: Function for decoding bytes to unicode string outstream: Stream to write to """ print_commit(commit, decode=decode, outstream=outstream) if commit.parents: parent_commit = repo[commit.parents[0]] base_tree = parent_commit.tree else: base_tree = None diffstream = BytesIO() write_tree_diff(diffstream, repo.object_store, base_tree, commit.tree) diffstream.seek(0) outstream.write(commit_decode(commit, diffstream.getvalue())) def show_tree(repo, tree, decode, outstream=sys.stdout): """Print a tree to a stream. Args: repo: A `Repo` object tree: A `Tree` object decode: Function for decoding bytes to unicode string outstream: Stream to write to """ for n in tree: outstream.write(decode(n) + "\n") def show_tag(repo, tag, decode, outstream=sys.stdout): """Print a tag to a stream. Args: repo: A `Repo` object tag: A `Tag` object decode: Function for decoding bytes to unicode string outstream: Stream to write to """ print_tag(tag, decode, outstream) show_object(repo, repo[tag.object[1]], decode, outstream) def show_object(repo, obj, decode, outstream): return { b"tree": show_tree, b"blob": show_blob, b"commit": show_commit, b"tag": show_tag, }[obj.type_name](repo, obj, decode, outstream) def print_name_status(changes): """Print a simple status summary, listing changed files.""" for change in changes: if not change: continue if isinstance(change, list): change = change[0] if change.type == CHANGE_ADD: path1 = change.new.path path2 = "" kind = "A" elif change.type == CHANGE_DELETE: path1 = change.old.path path2 = "" kind = "D" elif change.type == CHANGE_MODIFY: path1 = change.new.path path2 = "" kind = "M" elif change.type in RENAME_CHANGE_TYPES: path1 = change.old.path path2 = change.new.path if change.type == CHANGE_RENAME: kind = "R" elif change.type == CHANGE_COPY: kind = "C" yield "%-8s%-20s%-20s" % (kind, path1, path2) def log( repo=".", paths=None, outstream=sys.stdout, max_entries=None, reverse=False, name_status=False, ): """Write commit logs. Args: repo: Path to repository paths: Optional set of specific paths to print entries for outstream: Stream to write log output to reverse: Reverse order in which entries are printed name_status: Print name status max_entries: Optional maximum number of entries to display """ with open_repo_closing(repo) as r: walker = r.get_walker(max_entries=max_entries, paths=paths, reverse=reverse) for entry in walker: def decode(x): return commit_decode(entry.commit, x) print_commit(entry.commit, decode, outstream) if name_status: outstream.writelines( [line + "\n" for line in print_name_status(entry.changes())] ) # TODO(jelmer): better default for encoding? def show( - repo=".", objects=None, outstream=sys.stdout, default_encoding=DEFAULT_ENCODING + repo=".", + objects=None, + outstream=sys.stdout, + default_encoding=DEFAULT_ENCODING, ): """Print the changes in a commit. 
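# A minimal log sketch: write the three most recent commits of the current
# repository to stdout.
from dulwich import porcelain

porcelain.log(".", max_entries=3)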
Args: repo: Path to repository objects: Objects to show (defaults to [HEAD]) outstream: Stream to write to default_encoding: Default encoding to use if none is set in the commit """ if objects is None: objects = ["HEAD"] if not isinstance(objects, list): objects = [objects] with open_repo_closing(repo) as r: for objectish in objects: o = parse_object(r, objectish) if isinstance(o, Commit): def decode(x): return commit_decode(o, x, default_encoding) else: def decode(x): return x.decode(default_encoding) show_object(r, o, decode, outstream) def diff_tree(repo, old_tree, new_tree, outstream=sys.stdout): """Compares the content and mode of blobs found via two tree objects. Args: repo: Path to repository old_tree: Id of old tree new_tree: Id of new tree outstream: Stream to write to """ with open_repo_closing(repo) as r: write_tree_diff(outstream, r.object_store, old_tree, new_tree) def rev_list(repo, commits, outstream=sys.stdout): """Lists commit objects in reverse chronological order. Args: repo: Path to repository commits: Commits over which to iterate outstream: Stream to write to """ with open_repo_closing(repo) as r: for entry in r.get_walker(include=[r[c].id for c in commits]): outstream.write(entry.commit.id + b"\n") def tag(*args, **kwargs): import warnings warnings.warn( "tag has been deprecated in favour of tag_create.", DeprecationWarning ) return tag_create(*args, **kwargs) def tag_create( repo, tag, author=None, message=None, annotated=False, objectish="HEAD", tag_time=None, tag_timezone=None, sign=False, ): """Creates a tag in git via dulwich calls: Args: repo: Path to repository tag: tag string author: tag author (optional, if annotated is set) message: tag message (optional) annotated: whether to create an annotated tag objectish: object the tag should point at, defaults to HEAD tag_time: Optional time for annotated tag tag_timezone: Optional timezone for annotated tag sign: GPG Sign the tag """ with open_repo_closing(repo) as r: object = parse_object(r, objectish) if annotated: # Create the tag object tag_obj = Tag() if author is None: # TODO(jelmer): Don't use repo private method. author = r._get_user_identity(r.get_config_stack()) tag_obj.tagger = author tag_obj.message = message tag_obj.name = tag tag_obj.object = (type(object), object.id) if tag_time is None: tag_time = int(time.time()) tag_obj.tag_time = tag_time if tag_timezone is None: # TODO(jelmer) Use current user timezone rather than UTC tag_timezone = 0 elif isinstance(tag_timezone, str): tag_timezone = parse_timezone(tag_timezone) tag_obj.tag_timezone = tag_timezone if sign: import gpg with gpg.Context(armor=True) as c: tag_obj.signature, unused_result = c.sign(tag_obj.as_raw_string()) r.object_store.add_object(tag_obj) tag_id = tag_obj.id else: tag_id = object.id r.refs[_make_tag_ref(tag)] = tag_id def list_tags(*args, **kwargs): import warnings warnings.warn( - "list_tags has been deprecated in favour of tag_list.", DeprecationWarning + "list_tags has been deprecated in favour of tag_list.", + DeprecationWarning, ) return tag_list(*args, **kwargs) def tag_list(repo, outstream=sys.stdout): """List all tags. Args: repo: Path to repository outstream: Stream to write tags to """ with open_repo_closing(repo) as r: tags = sorted(r.refs.as_dict(b"refs/tags")) return tags def tag_delete(repo, name): """Remove a tag. 
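# A hedged sketch of tag_create(): an annotated tag with an illustrative
# identity and message.
from dulwich import porcelain

porcelain.tag_create(
    ".",
    b"v1.0",
    author=b"Alice <alice@example.com>",
    message=b"release 1.0",
    annotated=True,
)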
Args: repo: Path to repository name: Name of tag to remove """ with open_repo_closing(repo) as r: if isinstance(name, bytes): names = [name] elif isinstance(name, list): names = name else: raise Error("Unexpected tag name type %r" % name) for name in names: del r.refs[_make_tag_ref(name)] def reset(repo, mode, treeish="HEAD"): """Reset current HEAD to the specified state. Args: repo: Path to repository mode: Mode ("hard", "soft", "mixed") treeish: Treeish to reset to """ if mode != "hard": raise Error("hard is the only mode currently supported") with open_repo_closing(repo) as r: tree = parse_tree(r, treeish) r.reset_index(tree.id) def get_remote_repo( repo: Repo, remote_location: Optional[Union[str, bytes]] = None ) -> Tuple[Optional[str], str]: config = repo.get_config() if remote_location is None: remote_location = get_branch_remote(repo) if isinstance(remote_location, str): encoded_location = remote_location.encode() else: encoded_location = remote_location section = (b"remote", encoded_location) remote_name = None # type: Optional[str] if config.has_section(section): remote_name = encoded_location.decode() url = config.get(section, "url") encoded_location = url else: remote_name = None return (remote_name, encoded_location.decode()) def push( repo, remote_location=None, refspecs=None, outstream=default_bytes_out_stream, errstream=default_bytes_err_stream, force=False, **kwargs ): """Remote push with dulwich via dulwich.client Args: repo: Path to repository remote_location: Location of the remote refspecs: Refs to push to remote outstream: A stream file to write output errstream: A stream file to write errors force: Force overwriting refs """ # Open the repo with open_repo_closing(repo) as r: (remote_name, remote_location) = get_remote_repo(r, remote_location) # Get the client and path client, path = get_transport_and_path( remote_location, config=r.get_config_stack(), **kwargs ) selected_refs = [] remote_changed_refs = {} def update_refs(refs): selected_refs.extend(parse_reftuples(r.refs, refs, refspecs, force=force)) new_refs = {} # TODO: Handle selected_refs == {None: None} for (lh, rh, force_ref) in selected_refs: if lh is None: new_refs[rh] = ZERO_SHA remote_changed_refs[rh] = None else: try: localsha = r.refs[lh] except KeyError: raise Error("No valid ref %s in local repository" % lh) if not force_ref and rh in refs: check_diverged(r, refs[rh], localsha) new_refs[rh] = localsha remote_changed_refs[rh] = localsha return new_refs err_encoding = getattr(errstream, "encoding", None) or DEFAULT_ENCODING remote_location = client.get_url(path) try: result = client.send_pack( path, update_refs, generate_pack_data=r.generate_pack_data, progress=errstream.write, ) except SendPackError as e: raise Error( "Push to " + remote_location + " failed -> " + e.args[0].decode(), inner=e, ) else: errstream.write( b"Push to " + remote_location.encode(err_encoding) + b" successful.\n" ) for ref, error in (result.ref_status or {}).items(): if error is not None: errstream.write( b"Push of ref %s failed: %s\n" % (ref, error.encode(err_encoding)) ) else: errstream.write(b"Ref %s updated\n" % ref) if remote_name is not None: _import_remote_refs(r.refs, remote_name, remote_changed_refs) def pull( repo, remote_location=None, refspecs=None, outstream=default_bytes_out_stream, errstream=default_bytes_err_stream, fast_forward=True, force=False, **kwargs ): """Pull from remote via dulwich.client Args: repo: Path to repository remote_location: Location of the remote refspec: refspecs to fetch outstream: A stream 
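# A hedged push sketch, assuming a configured "origin" remote:
from dulwich import porcelain

porcelain.push(".", "origin", [b"master"])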
file to write to output errstream: A stream file to write to errors """ # Open the repo with open_repo_closing(repo) as r: (remote_name, remote_location) = get_remote_repo(r, remote_location) if refspecs is None: refspecs = [b"HEAD"] selected_refs = [] def determine_wants(remote_refs): selected_refs.extend( parse_reftuples(remote_refs, r.refs, refspecs, force=force) ) return [ remote_refs[lh] for (lh, rh, force_ref) in selected_refs if remote_refs[lh] not in r.object_store ] client, path = get_transport_and_path( remote_location, config=r.get_config_stack(), **kwargs ) fetch_result = client.fetch( path, r, progress=errstream.write, determine_wants=determine_wants ) for (lh, rh, force_ref) in selected_refs: try: check_diverged(r, r.refs[rh], fetch_result.refs[lh]) except DivergedBranches: if fast_forward: raise else: raise NotImplementedError("merge is not yet supported") r.refs[rh] = fetch_result.refs[lh] if selected_refs: r[b"HEAD"] = fetch_result.refs[selected_refs[0][1]] # Perform 'git checkout .' - syncs staged changes tree = r[b"HEAD"].tree r.reset_index(tree=tree) if remote_name is not None: _import_remote_refs(r.refs, remote_name, fetch_result.refs) def status(repo=".", ignored=False): """Returns staged, unstaged, and untracked changes relative to the HEAD. Args: repo: Path to repository or repository object ignored: Whether to include ignored files in `untracked` Returns: GitStatus tuple, staged - dict with lists of staged paths (diff index/HEAD) unstaged - list of unstaged paths (diff index/working-tree) untracked - list of untracked, un-ignored & non-.git paths """ with open_repo_closing(repo) as r: # 1. Get status of staged tracked_changes = get_tree_changes(r) # 2. Get status of unstaged index = r.open_index() normalizer = r.get_blob_normalizer() filter_callback = normalizer.checkin_normalize unstaged_changes = list(get_unstaged_changes(index, r.path, filter_callback)) untracked_paths = get_untracked_paths( r.path, r.path, index, exclude_ignored=not ignored ) untracked_changes = list(untracked_paths) return GitStatus(tracked_changes, unstaged_changes, untracked_changes) def _walk_working_dir_paths(frompath, basepath): """Get path, is_dir for files in working dir from frompath Args: frompath: Path to begin walk basepath: Path to compare to """ for dirpath, dirnames, filenames in os.walk(frompath): # Skip .git and below. if ".git" in dirnames: dirnames.remove(".git") if dirpath != basepath: continue if ".git" in filenames: filenames.remove(".git") if dirpath != basepath: continue if dirpath != frompath: yield dirpath, True for filename in filenames: filepath = os.path.join(dirpath, filename) yield filepath, False def get_untracked_paths(frompath, basepath, index, exclude_ignored=False): """Get untracked paths. Args: frompath: Path to walk basepath: Path to compare to index: Index to check against exclude_ignored: Whether to exclude ignored paths """ if exclude_ignored: with open_repo_closing(frompath) as r: ignore_manager = IgnoreFilterManager.from_repo(r) else: ignore_manager = None for ap, is_dir in _walk_working_dir_paths(frompath, basepath): if ignore_manager is not None and ignore_manager.is_ignored( os.path.relpath(ap, frompath) ): continue if not is_dir: ip = path_to_tree_path(basepath, ap) if ip not in index: yield os.path.relpath(ap, frompath) def get_tree_changes(repo): """Return add/delete/modify changes to tree by comparing index to HEAD. 
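# status() returns the GitStatus namedtuple defined near the top of this
# module; "staged" is the add/delete/modify dict built by get_tree_changes
# below. A minimal sketch:
from dulwich import porcelain

st = porcelain.status(".")
print(st.staged["modify"], st.unstaged, st.untracked)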
Args: repo: repo path or object Returns: dict with lists for each type of change """ with open_repo_closing(repo) as r: index = r.open_index() # Compares the Index to the HEAD & determines changes # Iterate through the changes and report add/delete/modify # TODO: call out to dulwich.diff_tree somehow. tracked_changes = { "add": [], "delete": [], "modify": [], } try: tree_id = r[b"HEAD"].tree except KeyError: tree_id = None for change in index.changes_from_tree(r.object_store, tree_id): if not change[0][0]: tracked_changes["add"].append(change[0][1]) elif not change[0][1]: tracked_changes["delete"].append(change[0][0]) elif change[0][0] == change[0][1]: tracked_changes["modify"].append(change[0][0]) else: raise NotImplementedError("git mv ops not yet supported") return tracked_changes def daemon(path=".", address=None, port=None): """Run a daemon serving Git requests over TCP/IP. Args: path: Path to the directory to serve. address: Optional address to listen on (defaults to ::) port: Optional port to listen on (defaults to TCP_GIT_PORT) """ # TODO(jelmer): Support git-daemon-export-ok and --export-all. backend = FileSystemBackend(path) server = TCPGitServer(backend, address, port) server.serve_forever() def web_daemon(path=".", address=None, port=None): """Run a daemon serving Git requests over HTTP. Args: path: Path to the directory to serve address: Optional address to listen on (defaults to ::) port: Optional port to listen on (defaults to 80) """ from dulwich.web import ( make_wsgi_chain, make_server, WSGIRequestHandlerLogger, WSGIServerLogger, ) backend = FileSystemBackend(path) app = make_wsgi_chain(backend) server = make_server( address, port, app, handler_class=WSGIRequestHandlerLogger, server_class=WSGIServerLogger, ) server.serve_forever() def upload_pack(path=".", inf=None, outf=None): """Upload a pack file after negotiating its contents using smart protocol. Args: path: Path to the repository inf: Input stream to communicate with client outf: Output stream to communicate with client """ if outf is None: outf = getattr(sys.stdout, "buffer", sys.stdout) if inf is None: inf = getattr(sys.stdin, "buffer", sys.stdin) path = os.path.expanduser(path) backend = FileSystemBackend(path) def send_fn(data): outf.write(data) outf.flush() proto = Protocol(inf.read, send_fn) handler = UploadPackHandler(backend, [path], proto) # FIXME: Catch exceptions and write a single-line summary to outf. handler.handle() return 0 def receive_pack(path=".", inf=None, outf=None): """Receive a pack file after negotiating its contents using smart protocol. Args: path: Path to the repository inf: Input stream to communicate with client outf: Output stream to communicate with client """ if outf is None: outf = getattr(sys.stdout, "buffer", sys.stdout) if inf is None: inf = getattr(sys.stdin, "buffer", sys.stdin) path = os.path.expanduser(path) backend = FileSystemBackend(path) def send_fn(data): outf.write(data) outf.flush() proto = Protocol(inf.read, send_fn) handler = ReceivePackHandler(backend, [path], proto) # FIXME: Catch exceptions and write a single-line summary to outf. handler.handle() return 0 def _make_branch_ref(name): if getattr(name, "encode", None): name = name.encode(DEFAULT_ENCODING) return LOCAL_BRANCH_PREFIX + name def _make_tag_ref(name): if getattr(name, "encode", None): name = name.encode(DEFAULT_ENCODING) return b"refs/tags/" + name def branch_delete(repo, name): """Delete a branch. 
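# A hedged sketch of daemon(): serve the current directory over git://
# (blocks forever; git's default port is 9418).
from dulwich import porcelain

porcelain.daemon(".", address="localhost", port=9418)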
Args: repo: Path to the repository name: Name of the branch """ with open_repo_closing(repo) as r: if isinstance(name, list): names = name else: names = [name] for name in names: del r.refs[_make_branch_ref(name)] def branch_create(repo, name, objectish=None, force=False): """Create a branch. Args: repo: Path to the repository name: Name of the new branch objectish: Target object to point new branch at (defaults to HEAD) force: Force creation of branch, even if it already exists """ with open_repo_closing(repo) as r: if objectish is None: objectish = "HEAD" object = parse_object(r, objectish) refname = _make_branch_ref(name) ref_message = b"branch: Created from " + objectish.encode("utf-8") if force: r.refs.set_if_equals(refname, None, object.id, message=ref_message) else: if not r.refs.add_if_new(refname, object.id, message=ref_message): raise Error("Branch with name %s already exists." % name) def branch_list(repo): """List all branches. Args: repo: Path to the repository """ with open_repo_closing(repo) as r: return r.refs.keys(base=LOCAL_BRANCH_PREFIX) def active_branch(repo): """Return the active branch in the repository, if any. Args: repo: Repository to open Returns: branch name Raises: KeyError: if the repository does not have a working tree IndexError: if HEAD is floating """ with open_repo_closing(repo) as r: active_ref = r.refs.follow(b"HEAD")[0][1] if not active_ref.startswith(LOCAL_BRANCH_PREFIX): raise ValueError(active_ref) return active_ref[len(LOCAL_BRANCH_PREFIX) :] def get_branch_remote(repo): """Return the active branch's remote name, if any. Args: repo: Repository to open Returns: remote name Raises: KeyError: if the repository does not have a working tree """ with open_repo_closing(repo) as r: branch_name = active_branch(r.path) config = r.get_config() try: remote_name = config.get((b"branch", branch_name), b"remote") except KeyError: remote_name = b"origin" return remote_name def _import_remote_refs( refs_container: RefsContainer, remote_name: str, refs: Dict[str, str], message: Optional[bytes] = None, prune: bool = False, prune_tags: bool = False, ): stripped_refs = strip_peeled_refs(refs) branches = { n[len(LOCAL_BRANCH_PREFIX) :]: v for (n, v) in stripped_refs.items() if n.startswith(LOCAL_BRANCH_PREFIX) } refs_container.import_refs( - b"refs/remotes/" + remote_name.encode(), branches, message=message, prune=prune + b"refs/remotes/" + remote_name.encode(), + branches, + message=message, + prune=prune, ) tags = { n[len(b"refs/tags/") :]: v for (n, v) in stripped_refs.items() if n.startswith(b"refs/tags/") and not n.endswith(ANNOTATED_TAG_SUFFIX) } refs_container.import_refs(b"refs/tags", tags, message=message, prune=prune_tags) def fetch( repo, remote_location=None, outstream=sys.stdout, errstream=default_bytes_err_stream, message=None, depth=None, prune=False, prune_tags=False, force=False, **kwargs ): """Fetch objects from a remote server. 
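# A hedged branch sketch ("feature-x" is illustrative):
from dulwich import porcelain

porcelain.branch_create(".", "feature-x")
print(porcelain.branch_list("."))    # names relative to refs/heads/
print(porcelain.active_branch("."))  # e.g. b"master"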
Args: repo: Path to the repository remote_location: String identifying a remote server outstream: Output stream (defaults to stdout) errstream: Error stream (defaults to stderr) message: Reflog message (defaults to b"fetch: from ") depth: Depth to fetch at prune: Prune remote removed refs prune_tags: Prune remote removed tags Returns: Dictionary with refs on the remote """ with open_repo_closing(repo) as r: (remote_name, remote_location) = get_remote_repo(r, remote_location) if message is None: message = b"fetch: from " + remote_location.encode("utf-8") client, path = get_transport_and_path( remote_location, config=r.get_config_stack(), **kwargs ) fetch_result = client.fetch(path, r, progress=errstream.write, depth=depth) if remote_name is not None: _import_remote_refs( r.refs, remote_name, fetch_result.refs, message, prune=prune, prune_tags=prune_tags, ) return fetch_result def ls_remote(remote, config=None, **kwargs): """List the refs in a remote. Args: remote: Remote repository location config: Configuration to use Returns: Dictionary with remote refs """ if config is None: config = StackedConfig.default() client, host_path = get_transport_and_path(remote, config=config, **kwargs) return client.get_refs(host_path) def repack(repo): """Repack loose files in a repository. Currently this only packs loose objects. Args: repo: Path to the repository """ with open_repo_closing(repo) as r: r.object_store.pack_loose_objects() def pack_objects(repo, object_ids, packf, idxf, delta_window_size=None): """Pack objects into a file. Args: repo: Path to the repository object_ids: List of object ids to write packf: File-like object to write to idxf: File-like object to write to (can be None) """ with open_repo_closing(repo) as r: entries, data_sum = write_pack_objects( packf, r.object_store.iter_shas((oid, None) for oid in object_ids), delta_window_size=delta_window_size, ) if idxf is not None: entries = sorted([(k, v[0], v[1]) for (k, v) in entries.items()]) write_pack_index(idxf, entries, data_sum) def ls_tree( - repo, treeish=b"HEAD", outstream=sys.stdout, recursive=False, name_only=False + repo, + treeish=b"HEAD", + outstream=sys.stdout, + recursive=False, + name_only=False, ): """List contents of a tree. Args: repo: Path to the repository treeish: Tree id to list outstream: Output stream (defaults to stdout) recursive: Whether to recursively list files name_only: Only print item name """ def list_tree(store, treeid, base): for (name, mode, sha) in store[treeid].iteritems(): if base: name = posixpath.join(base, name) if name_only: outstream.write(name + b"\n") else: outstream.write(pretty_format_tree_entry(name, mode, sha)) if stat.S_ISDIR(mode) and recursive: list_tree(store, sha, name) with open_repo_closing(repo) as r: tree = parse_tree(r, treeish) list_tree(r.object_store, tree.id, "") def remote_add(repo, name, url): """Add a remote. Args: repo: Path to the repository name: Remote name url: Remote URL """ if not isinstance(name, bytes): name = name.encode(DEFAULT_ENCODING) if not isinstance(url, bytes): url = url.encode(DEFAULT_ENCODING) with open_repo_closing(repo) as r: c = r.get_config() section = (b"remote", name) if c.has_section(section): raise RemoteExists(section) c.set(section, b"url", url) c.write_to_path() def check_ignore(repo, paths, no_index=False): """Debug gitignore files.
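# An ls_remote sketch (URL illustrative; needs network access):
from dulwich import porcelain

refs = porcelain.ls_remote("https://github.com/jelmer/dulwich")
print(refs[b"HEAD"])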
Args: repo: Path to the repository paths: List of paths to check no_index: Don't check index Returns: List of ignored files """ with open_repo_closing(repo) as r: index = r.open_index() ignore_manager = IgnoreFilterManager.from_repo(r) for path in paths: if not no_index and path_to_tree_path(r.path, path) in index: continue if os.path.isabs(path): path = os.path.relpath(path, r.path) if ignore_manager.is_ignored(path): yield path def update_head(repo, target, detached=False, new_branch=None): """Update HEAD to point at a new branch/commit. Note that this does not actually update the working tree. Args: repo: Path to the repository target: Branch or committish to switch to detached: Create a detached head new_branch: New branch to create """ with open_repo_closing(repo) as r: if new_branch is not None: to_set = _make_branch_ref(new_branch) else: to_set = b"HEAD" if detached: # TODO(jelmer): Provide some way so that the actual ref gets # updated rather than what it points to, so the delete isn't # necessary. del r.refs[to_set] r.refs[to_set] = parse_commit(r, target).id else: r.refs.set_symbolic_ref(to_set, parse_ref(r, target)) if new_branch is not None: r.refs.set_symbolic_ref(b"HEAD", to_set) def check_mailmap(repo, contact): """Check canonical name and email of contact. Args: repo: Path to the repository contact: Contact name and/or email Returns: Canonical contact data """ with open_repo_closing(repo) as r: from dulwich.mailmap import Mailmap try: mailmap = Mailmap.from_path(os.path.join(r.path, ".mailmap")) except FileNotFoundError: mailmap = Mailmap() return mailmap.lookup(contact) def fsck(repo): """Check a repository. Args: repo: A path to the repository Returns: Iterator over errors/warnings """ with open_repo_closing(repo) as r: # TODO(jelmer): check pack files # TODO(jelmer): check graph # TODO(jelmer): check refs for sha in r.object_store: o = r.object_store[sha] try: o.check() except Exception as e: yield (sha, e) def stash_list(repo): """List all stashes in a repository.""" with open_repo_closing(repo) as r: from dulwich.stash import Stash stash = Stash.from_repo(r) return enumerate(list(stash.stashes())) def stash_push(repo): """Push a new stash onto the stack.""" with open_repo_closing(repo) as r: from dulwich.stash import Stash stash = Stash.from_repo(r) stash.push() def stash_pop(repo): """Pop a stash from the stack.""" with open_repo_closing(repo) as r: from dulwich.stash import Stash stash = Stash.from_repo(r) stash.pop() def ls_files(repo): """List all files in an index.""" with open_repo_closing(repo) as r: return sorted(r.open_index()) def describe(repo): """Describe the repository version. Args: repo: git repository root Returns: a string description of the current git revision Examples: "gabcdefh", "v0.1" or "v0.1-5-gabcdefh".
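# A hedged stash sketch using the helpers above:
from dulwich import porcelain

porcelain.stash_push(".")               # save and clear local changes
print(list(porcelain.stash_list(".")))  # enumerated stash entries
porcelain.stash_pop(".")                # restore them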
""" # Get the repository with open_repo_closing(repo) as r: # Get a list of all tags refs = r.get_refs() tags = {} for key, value in refs.items(): key = key.decode() obj = r.get_object(value) if u"tags" not in key: continue _, tag = key.rsplit(u"/", 1) try: commit = obj.object except AttributeError: continue else: commit = r.get_object(commit[1]) tags[tag] = [ datetime.datetime(*time.gmtime(commit.commit_time)[:6]), commit.id.decode("ascii"), ] sorted_tags = sorted(tags.items(), key=lambda tag: tag[1][0], reverse=True) # If there are no tags, return the current commit if len(sorted_tags) == 0: return "g{}".format(r[r.head()].id.decode("ascii")[:7]) # We're now 0 commits from the top commit_count = 0 # Get the latest commit latest_commit = r[r.head()] # Walk through all commits walker = r.get_walker() for entry in walker: # Check if tag commit_id = entry.commit.id.decode("ascii") for tag in sorted_tags: tag_name = tag[0] tag_commit = tag[1][1] if commit_id == tag_commit: if commit_count == 0: return tag_name else: return "{}-{}-g{}".format( - tag_name, commit_count, latest_commit.id.decode("ascii")[:7] + tag_name, + commit_count, + latest_commit.id.decode("ascii")[:7], ) commit_count += 1 # Return plain commit if no parent tag can be found return "g{}".format(latest_commit.id.decode("ascii")[:7]) def get_object_by_path(repo, path, committish=None): """Get an object by path. Args: repo: A path to the repository path: Path to look up committish: Commit to look up path in Returns: A `ShaFile` object """ if committish is None: committish = "HEAD" # Get the repository with open_repo_closing(repo) as r: commit = parse_commit(r, committish) base_tree = commit.tree if not isinstance(path, bytes): path = commit_encode(commit, path) (mode, sha) = tree_lookup_path(r.object_store.__getitem__, base_tree, path) return r[sha] def write_tree(repo): """Write a tree object from the index. Args: repo: Repository for which to write tree Returns: tree id for the tree that was written """ with open_repo_closing(repo) as r: return r.open_index().commit(r.object_store) diff --git a/dulwich/reflog.py b/dulwich/reflog.py index 0f2ece44..73d11886 100644 --- a/dulwich/reflog.py +++ b/dulwich/reflog.py @@ -1,94 +1,95 @@ # reflog.py -- Parsing and writing reflog files # Copyright (C) 2015 Jelmer Vernooij and others. # # Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU # General Public License as public by the Free Software Foundation; version 2.0 # or (at your option) any later version. You can redistribute it and/or # modify it under the terms of either of these two licenses. # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # You should have received a copy of the licenses; if not, see # for a copy of the GNU General Public License # and for a copy of the Apache # License, Version 2.0. # """Utilities for reading and generating reflogs. 
""" import collections from dulwich.objects import ( format_timezone, parse_timezone, ZERO_SHA, ) Entry = collections.namedtuple( - "Entry", ["old_sha", "new_sha", "committer", "timestamp", "timezone", "message"] + "Entry", + ["old_sha", "new_sha", "committer", "timestamp", "timezone", "message"], ) def format_reflog_line(old_sha, new_sha, committer, timestamp, timezone, message): """Generate a single reflog line. Args: old_sha: Old Commit SHA new_sha: New Commit SHA committer: Committer name and e-mail timestamp: Timestamp timezone: Timezone message: Message """ if old_sha is None: old_sha = ZERO_SHA return ( old_sha + b" " + new_sha + b" " + committer + b" " + str(int(timestamp)).encode("ascii") + b" " + format_timezone(timezone) + b"\t" + message ) def parse_reflog_line(line): """Parse a reflog line. Args: line: Line to parse Returns: Tuple of (old_sha, new_sha, committer, timestamp, timezone, message) """ (begin, message) = line.split(b"\t", 1) (old_sha, new_sha, rest) = begin.split(b" ", 2) (committer, timestamp_str, timezone_str) = rest.rsplit(b" ", 2) return Entry( old_sha, new_sha, committer, int(timestamp_str), parse_timezone(timezone_str)[0], message, ) def read_reflog(f): """Read reflog. Args: f: File-like object Returns: Iterator over Entry objects """ for line in f: yield parse_reflog_line(line) diff --git a/dulwich/refs.py b/dulwich/refs.py index 1b7029ec..0d6d9fd7 100644 --- a/dulwich/refs.py +++ b/dulwich/refs.py @@ -1,1155 +1,1203 @@ # refs.py -- For dealing with git refs # Copyright (C) 2008-2013 Jelmer Vernooij # # Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU # General Public License as public by the Free Software Foundation; version 2.0 # or (at your option) any later version. You can redistribute it and/or # modify it under the terms of either of these two licenses. # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # You should have received a copy of the licenses; if not, see # for a copy of the GNU General Public License # and for a copy of the Apache # License, Version 2.0. # """Ref handling. """ import os from dulwich.errors import ( PackedRefsException, RefFormatError, ) from dulwich.objects import ( git_line, valid_hexsha, ZERO_SHA, ) from dulwich.file import ( GitFile, ensure_dir_exists, ) SYMREF = b"ref: " LOCAL_BRANCH_PREFIX = b"refs/heads/" LOCAL_TAG_PREFIX = b"refs/tags/" BAD_REF_CHARS = set(b"\177 ~^:?*[") ANNOTATED_TAG_SUFFIX = b"^{}" def parse_symref_value(contents): """Parse a symref value. Args: contents: Contents to parse Returns: Destination """ if contents.startswith(SYMREF): return contents[len(SYMREF) :].rstrip(b"\r\n") raise ValueError(contents) def check_ref_format(refname): """Check if a refname is correctly formatted. Implements all the same rules as git-check-ref-format[1]. [1] http://www.kernel.org/pub/software/scm/git/docs/git-check-ref-format.html Args: refname: The refname to check Returns: True if refname is valid, False otherwise """ # These could be combined into one big expression, but are listed # separately to parallel [1]. if b"/." in refname or refname.startswith(b"."): return False if b"/" not in refname: return False if b".." 
in refname: return False for i, c in enumerate(refname): if ord(refname[i : i + 1]) < 0o40 or c in BAD_REF_CHARS: return False if refname[-1] in b"/.": return False if refname.endswith(b".lock"): return False if b"@{" in refname: return False if b"\\" in refname: return False return True class RefsContainer(object): """A container for refs.""" def __init__(self, logger=None): self._logger = logger def _log( self, ref, old_sha, new_sha, committer=None, timestamp=None, timezone=None, message=None, ): if self._logger is None: return if message is None: return self._logger(ref, old_sha, new_sha, committer, timestamp, timezone, message) def set_symbolic_ref( - self, name, other, committer=None, timestamp=None, timezone=None, message=None + self, + name, + other, + committer=None, + timestamp=None, + timezone=None, + message=None, ): """Make a ref point at another ref. Args: name: Name of the ref to set other: Name of the ref to point at message: Optional message """ raise NotImplementedError(self.set_symbolic_ref) def get_packed_refs(self): """Get contents of the packed-refs file. Returns: Dictionary mapping ref names to SHA1s Note: Will return an empty dictionary when no packed-refs file is present. """ raise NotImplementedError(self.get_packed_refs) def get_peeled(self, name): """Return the cached peeled value of a ref, if available. Args: name: Name of the ref to peel Returns: The peeled value of the ref. If the ref is known not point to a tag, this will be the SHA the ref refers to. If the ref may point to a tag, but no cached information is available, None is returned. """ return None def import_refs( self, base, other, committer=None, timestamp=None, timezone=None, message=None, prune=False, ): if prune: to_delete = set(self.subkeys(base)) else: to_delete = set() for name, value in other.items(): if value is None: to_delete.add(name) else: self.set_if_equals( b"/".join((base, name)), None, value, message=message ) if to_delete: try: to_delete.remove(name) except KeyError: pass for ref in to_delete: self.remove_if_equals(b"/".join((base, ref)), None, message=message) def allkeys(self): """All refs present in this container.""" raise NotImplementedError(self.allkeys) def __iter__(self): return iter(self.allkeys()) def keys(self, base=None): """Refs present in this container. Args: base: An optional base to return refs under. Returns: An unsorted set of valid refs in this container, including packed refs. """ if base is not None: return self.subkeys(base) else: return self.allkeys() def subkeys(self, base): """Refs present in this container under a base. Args: base: The base to return refs under. Returns: A set of valid refs in this container under the base; the base prefix is stripped from the ref names returned. """ keys = set() base_len = len(base) + 1 for refname in self.allkeys(): if refname.startswith(base): keys.add(refname[base_len:]) return keys def as_dict(self, base=None): """Return the contents of this container as a dictionary.""" ret = {} keys = self.keys(base) if base is None: base = b"" else: base = base.rstrip(b"/") for key in keys: try: ret[key] = self[(base + b"/" + key).strip(b"/")] except KeyError: continue # Unable to resolve return ret def _check_refname(self, name): """Ensure a refname is valid and lives in refs or is HEAD. HEAD is not a valid refname according to git-check-ref-format, but this class needs to be able to touch HEAD. 
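# Round-trip sketch for the reflog helpers defined earlier; the SHAs,
# committer and timestamp are made-up values of the right shape.
from dulwich.reflog import format_reflog_line, parse_reflog_line

line = format_reflog_line(
    b"0" * 40, b"1" * 40, b"Jane Doe <jane@example.com>", 1446552919, 0,
    b"commit: initial",
)
entry = parse_reflog_line(line)
assert entry.new_sha == b"1" * 40 and entry.message == b"commit: initial"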
Also, check_ref_format expects refnames without the leading 'refs/', but this class requires that so it cannot touch anything outside the refs dir (or HEAD). Args: name: The name of the reference. Raises: KeyError: if a refname is not HEAD or is otherwise not valid. """ if name in (b"HEAD", b"refs/stash"): return if not name.startswith(b"refs/") or not check_ref_format(name[5:]): raise RefFormatError(name) def read_ref(self, refname): """Read a reference without following any references. Args: refname: The name of the reference Returns: The contents of the ref file, or None if it does not exist. """ contents = self.read_loose_ref(refname) if not contents: contents = self.get_packed_refs().get(refname, None) return contents def read_loose_ref(self, name): """Read a loose reference and return its contents. Args: name: the refname to read Returns: The contents of the ref file, or None if it does not exist. """ raise NotImplementedError(self.read_loose_ref) def follow(self, name): """Follow a reference name. Returns: a tuple of (refnames, sha), wheres refnames are the names of references in the chain """ contents = SYMREF + name depth = 0 refnames = [] while contents.startswith(SYMREF): refname = contents[len(SYMREF) :] refnames.append(refname) contents = self.read_ref(refname) if not contents: break depth += 1 if depth > 5: raise KeyError(name) return refnames, contents def _follow(self, name): import warnings warnings.warn( "RefsContainer._follow is deprecated. Use RefsContainer.follow " "instead.", DeprecationWarning, ) refnames, contents = self.follow(name) if not refnames: return (None, contents) return (refnames[-1], contents) def __contains__(self, refname): if self.read_ref(refname): return True return False def __getitem__(self, name): """Get the SHA1 for a reference name. This method follows all symbolic references. """ _, sha = self.follow(name) if sha is None: raise KeyError(name) return sha def set_if_equals( self, name, old_ref, new_ref, committer=None, timestamp=None, timezone=None, message=None, ): """Set a refname to new_ref only if it currently equals old_ref. This method follows all symbolic references if applicable for the subclass, and can be used to perform an atomic compare-and-swap operation. Args: name: The refname to set. old_ref: The old sha the refname must refer to, or None to set unconditionally. new_ref: The new sha the refname will refer to. message: Message for reflog Returns: True if the set was successful, False otherwise. """ raise NotImplementedError(self.set_if_equals) def add_if_new(self, name, ref): """Add a new reference only if it does not already exist. Args: name: Ref name ref: Ref value message: Message for reflog """ raise NotImplementedError(self.add_if_new) def __setitem__(self, name, ref): """Set a reference name to point to the given SHA1. This method follows all symbolic references if applicable for the subclass. Note: This method unconditionally overwrites the contents of a reference. To update atomically only if the reference has not changed, use set_if_equals(). Args: name: The refname to set. ref: The new sha the refname will refer to. """ self.set_if_equals(name, None, ref) def remove_if_equals( - self, name, old_ref, committer=None, timestamp=None, timezone=None, message=None + self, + name, + old_ref, + committer=None, + timestamp=None, + timezone=None, + message=None, ): """Remove a refname only if it currently equals old_ref. This method does not follow symbolic references, even if applicable for the subclass. 
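# Quick sanity checks mirroring the check_ref_format rules implemented above.
from dulwich.refs import check_ref_format

assert check_ref_format(b"refs/heads/main")
assert not check_ref_format(b"refs/heads/main.lock")  # ".lock" suffix rejected
assert not check_ref_format(b"refs/heads/a..b")       # ".." rejected
assert not check_ref_format(b"master")                # needs at least one "/"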
It can be used to perform an atomic compare-and-delete operation. Args: name: The refname to delete. old_ref: The old sha the refname must refer to, or None to delete unconditionally. message: Message for reflog Returns: True if the delete was successful, False otherwise. """ raise NotImplementedError(self.remove_if_equals) def __delitem__(self, name): """Remove a refname. This method does not follow symbolic references, even if applicable for the subclass. Note: This method unconditionally deletes the contents of a reference. To delete atomically only if the reference has not changed, use remove_if_equals(). Args: name: The refname to delete. """ self.remove_if_equals(name, None) def get_symrefs(self): """Get a dict with all symrefs in this container. Returns: Dictionary mapping source ref to target ref """ ret = {} for src in self.allkeys(): try: dst = parse_symref_value(self.read_ref(src)) except ValueError: pass else: ret[src] = dst return ret def watch(self): """Watch for changes to the refs in this container. Returns a context manager that yields tuples with (refname, new_sha) """ raise NotImplementedError(self.watch) class _DictRefsWatcher(object): def __init__(self, refs): self._refs = refs def __enter__(self): from queue import Queue self.queue = Queue() self._refs._watchers.add(self) return self def __next__(self): return self.queue.get() def _notify(self, entry): self.queue.put_nowait(entry) def __exit__(self, exc_type, exc_val, exc_tb): self._refs._watchers.remove(self) return False class DictRefsContainer(RefsContainer): """RefsContainer backed by a simple dict. This container does not support symbolic or packed references and is not threadsafe. """ def __init__(self, refs, logger=None): super(DictRefsContainer, self).__init__(logger=logger) self._refs = refs self._peeled = {} self._watchers = set() def allkeys(self): return self._refs.keys() def read_loose_ref(self, name): return self._refs.get(name, None) def get_packed_refs(self): return {} def _notify(self, ref, newsha): for watcher in self._watchers: watcher._notify((ref, newsha)) def watch(self): return _DictRefsWatcher(self) def set_symbolic_ref( - self, name, other, committer=None, timestamp=None, timezone=None, message=None + self, + name, + other, + committer=None, + timestamp=None, + timezone=None, + message=None, ): old = self.follow(name)[-1] new = SYMREF + other self._refs[name] = new self._notify(name, new) self._log( name, old, new, committer=committer, timestamp=timestamp, timezone=timezone, message=message, ) def set_if_equals( self, name, old_ref, new_ref, committer=None, timestamp=None, timezone=None, message=None, ): if old_ref is not None and self._refs.get(name, ZERO_SHA) != old_ref: return False realnames, _ = self.follow(name) for realname in realnames: self._check_refname(realname) old = self._refs.get(realname) self._refs[realname] = new_ref self._notify(realname, new_ref) self._log( realname, old, new_ref, committer=committer, timestamp=timestamp, timezone=timezone, message=message, ) return True def add_if_new( - self, name, ref, committer=None, timestamp=None, timezone=None, message=None + self, + name, + ref, + committer=None, + timestamp=None, + timezone=None, + message=None, ): if name in self._refs: return False self._refs[name] = ref self._notify(name, ref) self._log( name, None, ref, committer=committer, timestamp=timestamp, timezone=timezone, message=message, ) return True def remove_if_equals( - self, name, old_ref, committer=None, timestamp=None, timezone=None, message=None + self, + 
name, + old_ref, + committer=None, + timestamp=None, + timezone=None, + message=None, ): if old_ref is not None and self._refs.get(name, ZERO_SHA) != old_ref: return False try: old = self._refs.pop(name) except KeyError: pass else: self._notify(name, None) self._log( name, old, None, committer=committer, timestamp=timestamp, timezone=timezone, message=message, ) return True def get_peeled(self, name): return self._peeled.get(name) def _update(self, refs): """Update multiple refs; intended only for testing.""" # TODO(dborowitz): replace this with a public function that uses # set_if_equal. for ref, sha in refs.items(): self.set_if_equals(ref, None, sha) def _update_peeled(self, peeled): """Update cached peeled refs; intended only for testing.""" self._peeled.update(peeled) class InfoRefsContainer(RefsContainer): """Refs container that reads refs from a info/refs file.""" def __init__(self, f): self._refs = {} self._peeled = {} for line in f.readlines(): sha, name = line.rstrip(b"\n").split(b"\t") if name.endswith(ANNOTATED_TAG_SUFFIX): name = name[:-3] if not check_ref_format(name): raise ValueError("invalid ref name %r" % name) self._peeled[name] = sha else: if not check_ref_format(name): raise ValueError("invalid ref name %r" % name) self._refs[name] = sha def allkeys(self): return self._refs.keys() def read_loose_ref(self, name): return self._refs.get(name, None) def get_packed_refs(self): return {} def get_peeled(self, name): try: return self._peeled[name] except KeyError: return self._refs[name] class _InotifyRefsWatcher(object): def __init__(self, path): import pyinotify from queue import Queue self.path = os.fsdecode(path) self.manager = pyinotify.WatchManager() self.manager.add_watch( self.path, pyinotify.IN_DELETE | pyinotify.IN_CLOSE_WRITE | pyinotify.IN_MOVED_TO, rec=True, auto_add=True, ) self.notifier = pyinotify.ThreadedNotifier( self.manager, default_proc_fun=self._notify ) self.queue = Queue() def _notify(self, event): if event.dir: return if event.pathname.endswith(".lock"): return ref = os.fsencode(os.path.relpath(event.pathname, self.path)) if event.maskname == "IN_DELETE": self.queue.put_nowait((ref, None)) elif event.maskname in ("IN_CLOSE_WRITE", "IN_MOVED_TO"): with open(event.pathname, "rb") as f: sha = f.readline().rstrip(b"\n\r") self.queue.put_nowait((ref, sha)) def __next__(self): return self.queue.get() def __enter__(self): self.notifier.start() return self def __exit__(self, exc_type, exc_val, exc_tb): self.notifier.stop() return False class DiskRefsContainer(RefsContainer): """Refs container that reads refs from disk.""" def __init__(self, path, worktree_path=None, logger=None): super(DiskRefsContainer, self).__init__(logger=logger) if getattr(path, "encode", None) is not None: path = os.fsencode(path) self.path = path if worktree_path is None: worktree_path = path if getattr(worktree_path, "encode", None) is not None: worktree_path = os.fsencode(worktree_path) self.worktree_path = worktree_path self._packed_refs = None self._peeled_refs = None def __repr__(self): return "%s(%r)" % (self.__class__.__name__, self.path) def subkeys(self, base): subkeys = set() path = self.refpath(base) for root, unused_dirs, files in os.walk(path): dir = root[len(path) :] if os.path.sep != "/": dir = dir.replace(os.fsencode(os.path.sep), b"/") dir = dir.strip(b"/") for filename in files: refname = b"/".join(([dir] if dir else []) + [filename]) # check_ref_format requires at least one /, so we prepend the # base before calling it. 
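# Sketch exercising DictRefsContainer, the in-memory container defined above;
# the all-"a" SHA is a placeholder.
from dulwich.refs import DictRefsContainer, SYMREF

refs = DictRefsContainer({})
refs.add_if_new(b"refs/heads/master", b"a" * 40)
refs.set_symbolic_ref(b"HEAD", b"refs/heads/master")
assert refs[b"HEAD"] == b"a" * 40  # __getitem__ follows the symref
assert refs.read_ref(b"HEAD") == SYMREF + b"refs/heads/master"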
if check_ref_format(base + b"/" + refname): subkeys.add(refname) for key in self.get_packed_refs(): if key.startswith(base): subkeys.add(key[len(base) :].strip(b"/")) return subkeys def allkeys(self): allkeys = set() if os.path.exists(self.refpath(b"HEAD")): allkeys.add(b"HEAD") path = self.refpath(b"") refspath = self.refpath(b"refs") for root, unused_dirs, files in os.walk(refspath): dir = root[len(path) :] if os.path.sep != "/": dir = dir.replace(os.fsencode(os.path.sep), b"/") for filename in files: refname = b"/".join([dir, filename]) if check_ref_format(refname): allkeys.add(refname) allkeys.update(self.get_packed_refs()) return allkeys def refpath(self, name): """Return the disk path of a ref.""" if os.path.sep != "/": name = name.replace(b"/", os.fsencode(os.path.sep)) # TODO: as the 'HEAD' reference is working tree specific, it # should actually not be a part of RefsContainer if name == b"HEAD": return os.path.join(self.worktree_path, name) else: return os.path.join(self.path, name) def get_packed_refs(self): """Get contents of the packed-refs file. Returns: Dictionary mapping ref names to SHA1s Note: Will return an empty dictionary when no packed-refs file is present. """ # TODO: invalidate the cache on repacking if self._packed_refs is None: # set both to empty because we want _peeled_refs to be # None if and only if _packed_refs is also None. self._packed_refs = {} self._peeled_refs = {} path = os.path.join(self.path, b"packed-refs") try: f = GitFile(path, "rb") except FileNotFoundError: return {} with f: first_line = next(iter(f)).rstrip() if first_line.startswith(b"# pack-refs") and b" peeled" in first_line: for sha, name, peeled in read_packed_refs_with_peeled(f): self._packed_refs[name] = sha if peeled: self._peeled_refs[name] = peeled else: f.seek(0) for sha, name in read_packed_refs(f): self._packed_refs[name] = sha return self._packed_refs def get_peeled(self, name): """Return the cached peeled value of a ref, if available. Args: name: Name of the ref to peel Returns: The peeled value of the ref. If the ref is known not point to a tag, this will be the SHA the ref refers to. If the ref may point to a tag, but no cached information is available, None is returned. """ self.get_packed_refs() if self._peeled_refs is None or name not in self._packed_refs: # No cache: no peeled refs were read, or this ref is loose return None if name in self._peeled_refs: return self._peeled_refs[name] else: # Known not peelable return self[name] def read_loose_ref(self, name): """Read a reference file and return its contents. If the reference file a symbolic reference, only read the first line of the file. Otherwise, only read the first 40 bytes. Args: name: the refname to read, relative to refpath Returns: The contents of the ref file, or None if the file does not exist. 
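# Sketch: enumerating refs from disk; "/tmp/repo/.git" is a placeholder for
# an existing control directory.
from dulwich.refs import DiskRefsContainer

refs = DiskRefsContainer("/tmp/repo/.git")
print(refs.get_packed_refs())  # {} when no packed-refs file is present
for name in sorted(refs.allkeys()):
    print(name.decode(), refs.read_ref(name))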
Raises: IOError: if any other error occurs """ filename = self.refpath(name) try: with GitFile(filename, "rb") as f: header = f.read(len(SYMREF)) if header == SYMREF: # Read only the first line return header + next(iter(f)).rstrip(b"\r\n") else: # Read only the first 40 bytes return header + f.read(40 - len(SYMREF)) except (FileNotFoundError, IsADirectoryError, NotADirectoryError): return None def _remove_packed_ref(self, name): if self._packed_refs is None: return filename = os.path.join(self.path, b"packed-refs") # reread cached refs from disk, while holding the lock f = GitFile(filename, "wb") try: self._packed_refs = None self.get_packed_refs() if name not in self._packed_refs: return del self._packed_refs[name] if name in self._peeled_refs: del self._peeled_refs[name] write_packed_refs(f, self._packed_refs, self._peeled_refs) f.close() finally: f.abort() def set_symbolic_ref( - self, name, other, committer=None, timestamp=None, timezone=None, message=None + self, + name, + other, + committer=None, + timestamp=None, + timezone=None, + message=None, ): """Make a ref point at another ref. Args: name: Name of the ref to set other: Name of the ref to point at message: Optional message to describe the change """ self._check_refname(name) self._check_refname(other) filename = self.refpath(name) f = GitFile(filename, "wb") try: f.write(SYMREF + other + b"\n") sha = self.follow(name)[-1] self._log( name, sha, sha, committer=committer, timestamp=timestamp, timezone=timezone, message=message, ) except BaseException: f.abort() raise else: f.close() def set_if_equals( self, name, old_ref, new_ref, committer=None, timestamp=None, timezone=None, message=None, ): """Set a refname to new_ref only if it currently equals old_ref. This method follows all symbolic references, and can be used to perform an atomic compare-and-swap operation. Args: name: The refname to set. old_ref: The old sha the refname must refer to, or None to set unconditionally. new_ref: The new sha the refname will refer to. message: Set message for reflog Returns: True if the set was successful, False otherwise. """ self._check_refname(name) try: realnames, _ = self.follow(name) realname = realnames[-1] except (KeyError, IndexError): realname = name filename = self.refpath(realname) # make sure none of the ancestor folders is in packed refs probe_ref = os.path.dirname(realname) packed_refs = self.get_packed_refs() while probe_ref: if packed_refs.get(probe_ref, None) is not None: raise NotADirectoryError(filename) probe_ref = os.path.dirname(probe_ref) ensure_dir_exists(os.path.dirname(filename)) with GitFile(filename, "wb") as f: if old_ref is not None: try: # read again while holding the lock orig_ref = self.read_loose_ref(realname) if orig_ref is None: orig_ref = self.get_packed_refs().get(realname, ZERO_SHA) if orig_ref != old_ref: f.abort() return False except (OSError, IOError): f.abort() raise try: f.write(new_ref + b"\n") except (OSError, IOError): f.abort() raise self._log( realname, old_ref, new_ref, committer=committer, timestamp=timestamp, timezone=timezone, message=message, ) return True def add_if_new( - self, name, ref, committer=None, timestamp=None, timezone=None, message=None + self, + name, + ref, + committer=None, + timestamp=None, + timezone=None, + message=None, ): """Add a new reference only if it does not already exist. This method follows symrefs, and only ensures that the last ref in the chain does not exist. Args: name: The refname to set. ref: The new sha the refname will refer to. 
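# Hedged compare-and-swap sketch for set_if_equals; the path and new SHA are
# placeholders, and the old value is re-read so the swap stays atomic.
from dulwich.refs import DiskRefsContainer

refs = DiskRefsContainer("/tmp/repo/.git")
old = refs.read_ref(b"refs/heads/master")
if refs.set_if_equals(b"refs/heads/master", old, b"b" * 40, message=b"reset: example"):
    print("ref updated atomically")
else:
    print("ref changed concurrently; retry")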
message: Optional message for reflog Returns: True if the add was successful, False otherwise. """ try: realnames, contents = self.follow(name) if contents is not None: return False realname = realnames[-1] except (KeyError, IndexError): realname = name self._check_refname(realname) filename = self.refpath(realname) ensure_dir_exists(os.path.dirname(filename)) with GitFile(filename, "wb") as f: if os.path.exists(filename) or name in self.get_packed_refs(): f.abort() return False try: f.write(ref + b"\n") except (OSError, IOError): f.abort() raise else: self._log( name, None, ref, committer=committer, timestamp=timestamp, timezone=timezone, message=message, ) return True def remove_if_equals( - self, name, old_ref, committer=None, timestamp=None, timezone=None, message=None + self, + name, + old_ref, + committer=None, + timestamp=None, + timezone=None, + message=None, ): """Remove a refname only if it currently equals old_ref. This method does not follow symbolic references. It can be used to perform an atomic compare-and-delete operation. Args: name: The refname to delete. old_ref: The old sha the refname must refer to, or None to delete unconditionally. message: Optional message Returns: True if the delete was successful, False otherwise. """ self._check_refname(name) filename = self.refpath(name) ensure_dir_exists(os.path.dirname(filename)) f = GitFile(filename, "wb") try: if old_ref is not None: orig_ref = self.read_loose_ref(name) if orig_ref is None: orig_ref = self.get_packed_refs().get(name, ZERO_SHA) if orig_ref != old_ref: return False # remove the reference file itself try: os.remove(filename) except FileNotFoundError: pass # may only be packed self._remove_packed_ref(name) self._log( name, old_ref, None, committer=committer, timestamp=timestamp, timezone=timezone, message=message, ) finally: # never write, we just wanted the lock f.abort() # outside of the lock, clean-up any parent directory that might now # be empty. this ensures that re-creating a reference of the same # name of what was previously a directory works as expected parent = name while True: try: parent, _ = parent.rsplit(b"/", 1) except ValueError: break parent_filename = self.refpath(parent) try: os.rmdir(parent_filename) except OSError: # this can be caused by the parent directory being # removed by another process, being not empty, etc. # in any case, this is non fatal because we already # removed the reference, just ignore it break return True def watch(self): import pyinotify # noqa: F401 return _InotifyRefsWatcher(self.path) def _split_ref_line(line): """Split a single ref line into a tuple of SHA1 and name.""" fields = line.rstrip(b"\n\r").split(b" ") if len(fields) != 2: raise PackedRefsException("invalid ref line %r" % line) sha, name = fields if not valid_hexsha(sha): raise PackedRefsException("Invalid hex sha %r" % sha) if not check_ref_format(name): raise PackedRefsException("invalid ref name %r" % name) return (sha, name) def read_packed_refs(f): """Read a packed refs file. Args: f: file-like object to read from Returns: Iterator over tuples with SHA1s and ref names. """ for line in f: if line.startswith(b"#"): # Comment continue if line.startswith(b"^"): raise PackedRefsException("found peeled ref in packed-refs without peeled") yield _split_ref_line(line) def read_packed_refs_with_peeled(f): """Read a packed refs file including peeled refs. Assumes the "# pack-refs with: peeled" line was already read. Yields tuples with ref names, SHA1s, and peeled SHA1s (or None). 
Args: f: file-like object to read from, seek'ed to the second line """ last = None for line in f: if line[0] == b"#": continue line = line.rstrip(b"\r\n") if line.startswith(b"^"): if not last: raise PackedRefsException("unexpected peeled ref line") if not valid_hexsha(line[1:]): raise PackedRefsException("Invalid hex sha %r" % line[1:]) sha, name = _split_ref_line(last) last = None yield (sha, name, line[1:]) else: if last: sha, name = _split_ref_line(last) yield (sha, name, None) last = line if last: sha, name = _split_ref_line(last) yield (sha, name, None) def write_packed_refs(f, packed_refs, peeled_refs=None): """Write a packed refs file. Args: f: empty file-like object to write to packed_refs: dict of refname to sha of packed refs to write peeled_refs: dict of refname to peeled value of sha """ if peeled_refs is None: peeled_refs = {} else: f.write(b"# pack-refs with: peeled\n") for refname in sorted(packed_refs.keys()): f.write(git_line(packed_refs[refname], refname)) if refname in peeled_refs: f.write(b"^" + peeled_refs[refname] + b"\n") def read_info_refs(f): ret = {} for line in f.readlines(): (sha, name) = line.rstrip(b"\r\n").split(b"\t", 1) ret[name] = sha return ret def write_info_refs(refs, store): """Generate info refs.""" for name, sha in sorted(refs.items()): # get_refs() includes HEAD as a special case, but we don't want to # advertise it if name == b"HEAD": continue try: o = store[sha] except KeyError: continue peeled = store.peel_sha(sha) yield o.id + b"\t" + name + b"\n" if o.id != peeled.id: yield peeled.id + b"\t" + name + ANNOTATED_TAG_SUFFIX + b"\n" def is_local_branch(x): return x.startswith(LOCAL_BRANCH_PREFIX) def strip_peeled_refs(refs): """Remove all peeled refs""" return { ref: sha for (ref, sha) in refs.items() if not ref.endswith(ANNOTATED_TAG_SUFFIX) } diff --git a/dulwich/repo.py b/dulwich/repo.py index 9d652cc0..1bd7e34f 100644 --- a/dulwich/repo.py +++ b/dulwich/repo.py @@ -1,1599 +1,1624 @@ # repo.py -- For dealing with git repositories. # Copyright (C) 2007 James Westby # Copyright (C) 2008-2013 Jelmer Vernooij # # Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU # General Public License as public by the Free Software Foundation; version 2.0 # or (at your option) any later version. You can redistribute it and/or # modify it under the terms of either of these two licenses. # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # You should have received a copy of the licenses; if not, see # for a copy of the GNU General Public License # and for a copy of the Apache # License, Version 2.0. # """Repository access. This module contains the base class for git repositories (BaseRepo) and an implementation which uses a repository on local disk (Repo). """ from io import BytesIO import os import sys import stat import time from typing import Optional, Tuple, TYPE_CHECKING, List, Dict, Union, Iterable if TYPE_CHECKING: # There are no circular imports here, but we try to defer imports as long # as possible to reduce start-up time for anything that doesn't need # these imports. 
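# Round-trip sketch for the packed-refs helpers defined above, using an
# in-memory file; the SHAs are placeholders.
from io import BytesIO
from dulwich.refs import write_packed_refs, read_packed_refs_with_peeled

f = BytesIO()
write_packed_refs(f, {b"refs/tags/v1": b"c" * 40}, {b"refs/tags/v1": b"d" * 40})
f.seek(0)
f.readline()  # skip the "# pack-refs with: peeled" header line
for sha, name, peeled in read_packed_refs_with_peeled(f):
    print(sha, name, peeled)  # (b"c"*40, b"refs/tags/v1", b"d"*40)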
from dulwich.config import StackedConfig, ConfigFile from dulwich.index import Index from dulwich.errors import ( NoIndexPresent, NotBlobError, NotCommitError, NotGitRepository, NotTreeError, NotTagError, CommitError, RefFormatError, HookError, ) from dulwich.file import ( GitFile, ) from dulwich.object_store import ( DiskObjectStore, MemoryObjectStore, BaseObjectStore, ObjectStoreGraphWalker, ) from dulwich.objects import ( check_hexsha, valid_hexsha, Blob, Commit, ShaFile, Tag, Tree, ) from dulwich.pack import ( pack_objects_to_data, ) from dulwich.hooks import ( Hook, PreCommitShellHook, PostCommitShellHook, CommitMsgShellHook, PostReceiveShellHook, ) from dulwich.line_ending import BlobNormalizer from dulwich.refs import ( # noqa: F401 ANNOTATED_TAG_SUFFIX, check_ref_format, RefsContainer, DictRefsContainer, InfoRefsContainer, DiskRefsContainer, read_packed_refs, read_packed_refs_with_peeled, write_packed_refs, SYMREF, ) import warnings CONTROLDIR = ".git" OBJECTDIR = "objects" REFSDIR = "refs" REFSDIR_TAGS = "tags" REFSDIR_HEADS = "heads" INDEX_FILENAME = "index" COMMONDIR = "commondir" GITDIR = "gitdir" WORKTREES = "worktrees" BASE_DIRECTORIES = [ ["branches"], [REFSDIR], [REFSDIR, REFSDIR_TAGS], [REFSDIR, REFSDIR_HEADS], ["hooks"], ["info"], ] DEFAULT_REF = b"refs/heads/master" class InvalidUserIdentity(Exception): """User identity is not of the format 'user '""" def __init__(self, identity): self.identity = identity def _get_default_identity() -> Tuple[str, str]: import getpass import socket username = getpass.getuser() try: import pwd except ImportError: fullname = None else: try: gecos = pwd.getpwnam(username).pw_gecos except KeyError: fullname = None else: fullname = gecos.split(",")[0] if not fullname: fullname = username email = os.environ.get("EMAIL") if email is None: email = "{}@{}".format(username, socket.gethostname()) return (fullname, email) def get_user_identity(config: "StackedConfig", kind: Optional[str] = None) -> bytes: """Determine the identity to use for new commits. If kind is set, this first checks GIT_${KIND}_NAME and GIT_${KIND}_EMAIL. If those variables are not set, then it will fall back to reading the user.name and user.email settings from the specified configuration. If that also fails, then it will fall back to using the current users' identity as obtained from the host system (e.g. the gecos field, $EMAIL, $USER@$(hostname -f). Args: kind: Optional kind to return identity for, usually either "AUTHOR" or "COMMITTER". Returns: A user identity """ user = None # type: Optional[bytes] email = None # type: Optional[bytes] if kind: user_uc = os.environ.get("GIT_" + kind + "_NAME") if user_uc is not None: user = user_uc.encode("utf-8") email_uc = os.environ.get("GIT_" + kind + "_EMAIL") if email_uc is not None: email = email_uc.encode("utf-8") if user is None: try: user = config.get(("user",), "name") except KeyError: user = None if email is None: try: email = config.get(("user",), "email") except KeyError: email = None default_user, default_email = _get_default_identity() if user is None: user = default_user.encode("utf-8") if email is None: email = default_email.encode("utf-8") if email.startswith(b"<") and email.endswith(b">"): email = email[1:-1] return user + b" <" + email + b">" def check_user_identity(identity): """Verify that a user identity is formatted correctly. 
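# Sketch of identity resolution as implemented above; the environment values
# are illustrative.
import os
from dulwich.config import StackedConfig
from dulwich.repo import get_user_identity

os.environ["GIT_COMMITTER_NAME"] = "Jane Doe"
os.environ["GIT_COMMITTER_EMAIL"] = "jane@example.com"
assert get_user_identity(StackedConfig.default(), kind="COMMITTER") == \
    b"Jane Doe <jane@example.com>"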
Args: identity: User identity bytestring Raises: InvalidUserIdentity: Raised when identity is invalid """ try: fst, snd = identity.split(b" <", 1) except ValueError: raise InvalidUserIdentity(identity) if b">" not in snd: raise InvalidUserIdentity(identity) -def parse_graftpoints(graftpoints: Iterable[bytes]) -> Dict[bytes, List[bytes]]: +def parse_graftpoints( + graftpoints: Iterable[bytes], +) -> Dict[bytes, List[bytes]]: """Convert a list of graftpoints into a dict Args: graftpoints: Iterator of graftpoint lines Each line is formatted as: []* Resulting dictionary is: : [*] https://git.wiki.kernel.org/index.php/GraftPoint """ grafts = {} for line in graftpoints: raw_graft = line.split(None, 1) commit = raw_graft[0] if len(raw_graft) == 2: parents = raw_graft[1].split() else: parents = [] for sha in [commit] + parents: check_hexsha(sha, "Invalid graftpoint") grafts[commit] = parents return grafts def serialize_graftpoints(graftpoints: Dict[bytes, List[bytes]]) -> bytes: """Convert a dictionary of grafts into string The graft dictionary is: : [*] Each line is formatted as: []* https://git.wiki.kernel.org/index.php/GraftPoint """ graft_lines = [] for commit, parents in graftpoints.items(): if parents: graft_lines.append(commit + b" " + b" ".join(parents)) else: graft_lines.append(commit) return b"\n".join(graft_lines) def _set_filesystem_hidden(path): """Mark path as to be hidden if supported by platform and filesystem. On win32 uses SetFileAttributesW api: """ if sys.platform == "win32": import ctypes from ctypes.wintypes import BOOL, DWORD, LPCWSTR FILE_ATTRIBUTE_HIDDEN = 2 SetFileAttributesW = ctypes.WINFUNCTYPE(BOOL, LPCWSTR, DWORD)( ("SetFileAttributesW", ctypes.windll.kernel32) ) if isinstance(path, bytes): path = os.fsdecode(path) if not SetFileAttributesW(path, FILE_ATTRIBUTE_HIDDEN): pass # Could raise or log `ctypes.WinError()` here # Could implement other platform specific filesytem hiding here class ParentsProvider(object): def __init__(self, store, grafts={}, shallows=[]): self.store = store self.grafts = grafts self.shallows = set(shallows) def get_parents(self, commit_id, commit=None): try: return self.grafts[commit_id] except KeyError: pass if commit_id in self.shallows: return [] if commit is None: commit = self.store[commit_id] return commit.parents class BaseRepo(object): """Base class for a git repository. :ivar object_store: Dictionary-like object for accessing the objects :ivar refs: Dictionary-like object with the refs in this repository """ def __init__(self, object_store: BaseObjectStore, refs: RefsContainer): """Open a repository. This shouldn't be called directly, but rather through one of the base classes, such as MemoryRepo or Repo. Args: object_store: Object store to use refs: Refs container to use """ self.object_store = object_store self.refs = refs self._graftpoints = {} # type: Dict[bytes, List[bytes]] self.hooks = {} # type: Dict[str, Hook] def _determine_file_mode(self) -> bool: """Probe the file-system to determine whether permissions can be trusted. Returns: True if permissions can be trusted, False otherwise. 
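# Round-trip sketch for the graftpoint helpers defined above; the SHAs are
# placeholders of valid hex shape.
from dulwich.repo import parse_graftpoints, serialize_graftpoints

lines = [b"1" * 40 + b" " + b"2" * 40]  # "<commit sha> <parent sha>"
grafts = parse_graftpoints(lines)
assert grafts == {b"1" * 40: [b"2" * 40]}
assert serialize_graftpoints(grafts) == lines[0]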
""" raise NotImplementedError(self._determine_file_mode) def _init_files(self, bare: bool) -> None: """Initialize a default set of named files.""" from dulwich.config import ConfigFile self._put_named_file("description", b"Unnamed repository") f = BytesIO() cf = ConfigFile() cf.set("core", "repositoryformatversion", "0") if self._determine_file_mode(): cf.set("core", "filemode", True) else: cf.set("core", "filemode", False) cf.set("core", "bare", bare) cf.set("core", "logallrefupdates", True) cf.write_to_file(f) self._put_named_file("config", f.getvalue()) self._put_named_file(os.path.join("info", "exclude"), b"") def get_named_file(self, path): """Get a file from the control dir with a specific name. Although the filename should be interpreted as a filename relative to the control dir in a disk-based Repo, the object returned need not be pointing to a file in that location. Args: path: The path to the file, relative to the control dir. Returns: An open file object, or None if the file does not exist. """ raise NotImplementedError(self.get_named_file) def _put_named_file(self, path, contents): """Write a file to the control dir with the given name and contents. Args: path: The path to the file, relative to the control dir. contents: A string to write to the file. """ raise NotImplementedError(self._put_named_file) def _del_named_file(self, path): """Delete a file in the contrl directory with the given name.""" raise NotImplementedError(self._del_named_file) def open_index(self): """Open the index for this repository. Raises: NoIndexPresent: If no index is present Returns: The matching `Index` """ raise NotImplementedError(self.open_index) def fetch(self, target, determine_wants=None, progress=None, depth=None): """Fetch objects into another repository. Args: target: The target repository determine_wants: Optional function to determine what refs to fetch. progress: Optional progress function depth: Optional shallow fetch depth Returns: The local refs """ if determine_wants is None: determine_wants = target.object_store.determine_wants_all count, pack_data = self.fetch_pack_data( - determine_wants, target.get_graph_walker(), progress=progress, depth=depth + determine_wants, + target.get_graph_walker(), + progress=progress, + depth=depth, ) target.object_store.add_pack_data(count, pack_data, progress) return self.get_refs() def fetch_pack_data( - self, determine_wants, graph_walker, progress, get_tagged=None, depth=None + self, + determine_wants, + graph_walker, + progress, + get_tagged=None, + depth=None, ): """Fetch the pack data required for a set of revisions. Args: determine_wants: Function that takes a dictionary with heads and returns the list of heads to fetch. graph_walker: Object that can iterate over the list of revisions to fetch and has an "ack" method that will be called to acknowledge that a revision is present. progress: Simple progress function that will be called with updated progress strings. get_tagged: Function that returns a dict of pointed-to sha -> tag sha for including tags. depth: Shallow fetch depth Returns: count and iterator over pack data """ # TODO(jelmer): Fetch pack data directly, don't create objects first. 
objects = self.fetch_objects( determine_wants, graph_walker, progress, get_tagged, depth=depth ) return pack_objects_to_data(objects) def fetch_objects( - self, determine_wants, graph_walker, progress, get_tagged=None, depth=None + self, + determine_wants, + graph_walker, + progress, + get_tagged=None, + depth=None, ): """Fetch the missing objects required for a set of revisions. Args: determine_wants: Function that takes a dictionary with heads and returns the list of heads to fetch. graph_walker: Object that can iterate over the list of revisions to fetch and has an "ack" method that will be called to acknowledge that a revision is present. progress: Simple progress function that will be called with updated progress strings. get_tagged: Function that returns a dict of pointed-to sha -> tag sha for including tags. depth: Shallow fetch depth Returns: iterator over objects, with __len__ implemented """ if depth not in (None, 0): raise NotImplementedError("depth not supported yet") refs = {} for ref, sha in self.get_refs().items(): try: obj = self.object_store[sha] except KeyError: warnings.warn( "ref %s points at non-present sha %s" % (ref.decode("utf-8", "replace"), sha.decode("ascii")), UserWarning, ) continue else: if isinstance(obj, Tag): refs[ref + ANNOTATED_TAG_SUFFIX] = obj.object[1] refs[ref] = sha wants = determine_wants(refs) if not isinstance(wants, list): raise TypeError("determine_wants() did not return a list") shallows = getattr(graph_walker, "shallow", frozenset()) unshallows = getattr(graph_walker, "unshallow", frozenset()) if wants == []: # TODO(dborowitz): find a way to short-circuit that doesn't change # this interface. if shallows or unshallows: # Do not send a pack in shallow short-circuit path return None return [] # If the graph walker is set up with an implementation that can # ACK/NAK to the wire, it will write data to the client through # this call as a side-effect. haves = self.object_store.find_common_revisions(graph_walker) # Deal with shallow requests separately because the haves do # not reflect what objects are missing if shallows or unshallows: # TODO: filter the haves commits from iter_shas. the specific # commits aren't missing. haves = [] parents_provider = ParentsProvider(self.object_store, shallows=shallows) def get_parents(commit): return parents_provider.get_parents(commit.id, commit) return self.object_store.iter_shas( self.object_store.find_missing_objects( haves, wants, self.get_shallow(), progress, get_tagged, get_parents=get_parents, ) ) def generate_pack_data(self, have, want, progress=None, ofs_delta=None): """Generate pack data objects for a set of wants/haves. Args: have: List of SHA1s of objects that should not be sent want: List of SHA1s of objects that should be sent ofs_delta: Whether OFS deltas can be included progress: Optional progress reporting method """ return self.object_store.generate_pack_data( have, want, shallow=self.get_shallow(), progress=progress, ofs_delta=ofs_delta, ) def get_graph_walker(self, heads=None): """Retrieve a graph walker. A graph walker is used by a remote repository (or proxy) to find out which objects are present in this repository. 
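# Sketch: fetching everything from a disk repository into an in-memory one;
# "/tmp/repo" is a placeholder, and MemoryRepo is assumed from dulwich.repo.
from dulwich.repo import MemoryRepo, Repo

src = Repo("/tmp/repo")
dst = MemoryRepo()
remote_refs = src.fetch(dst)  # negotiates wants/haves, then copies pack data
print(sorted(remote_refs))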
Args: heads: Repository heads to use (optional) Returns: A graph walker object """ if heads is None: heads = [ sha for sha in self.refs.as_dict(b"refs/heads").values() if sha in self.object_store ] parents_provider = ParentsProvider(self.object_store) return ObjectStoreGraphWalker( heads, parents_provider.get_parents, shallow=self.get_shallow() ) def get_refs(self) -> Dict[bytes, bytes]: """Get dictionary with all refs. Returns: A ``dict`` mapping ref names to SHA1s """ return self.refs.as_dict() def head(self) -> bytes: """Return the SHA1 pointed at by HEAD.""" return self.refs[b"HEAD"] def _get_object(self, sha, cls): assert len(sha) in (20, 40) ret = self.get_object(sha) if not isinstance(ret, cls): if cls is Commit: raise NotCommitError(ret) elif cls is Blob: raise NotBlobError(ret) elif cls is Tree: raise NotTreeError(ret) elif cls is Tag: raise NotTagError(ret) else: raise Exception( "Type invalid: %r != %r" % (ret.type_name, cls.type_name) ) return ret def get_object(self, sha: bytes) -> ShaFile: """Retrieve the object with the specified SHA. Args: sha: SHA to retrieve Returns: A ShaFile object Raises: KeyError: when the object can not be found """ return self.object_store[sha] def parents_provider(self): return ParentsProvider( - self.object_store, grafts=self._graftpoints, shallows=self.get_shallow() + self.object_store, + grafts=self._graftpoints, + shallows=self.get_shallow(), ) def get_parents(self, sha: bytes, commit: Commit = None) -> List[bytes]: """Retrieve the parents of a specific commit. If the specific commit is a graftpoint, the graft parents will be returned instead. Args: sha: SHA of the commit for which to retrieve the parents commit: Optional commit matching the sha Returns: List of parents """ return self.parents_provider().get_parents(sha, commit) def get_config(self): """Retrieve the config object. Returns: `ConfigFile` object for the ``.git/config`` file. """ raise NotImplementedError(self.get_config) def get_description(self): """Retrieve the description for this repository. Returns: String with the description of the repository as set by the user. """ raise NotImplementedError(self.get_description) def set_description(self, description): """Set the description for this repository. Args: description: Text to set as description for this repository. """ raise NotImplementedError(self.set_description) def get_config_stack(self) -> "StackedConfig": """Return a config stack for this repository. This stack accesses the configuration for both this repository itself (.git/config) and the global configuration, which usually lives in ~/.gitconfig. Returns: `Config` instance for this repository """ from dulwich.config import StackedConfig backends = [self.get_config()] + StackedConfig.default_backends() return StackedConfig(backends, writable=backends[0]) def get_shallow(self): """Get the set of shallow commits. Returns: Set of shallow commits. """ f = self.get_named_file("shallow") if f is None: return set() with f: return set(line.strip() for line in f) def update_shallow(self, new_shallow, new_unshallow): """Update the list of shallow objects. Args: new_shallow: Newly shallow objects new_unshallow: Newly no longer shallow objects """ shallow = self.get_shallow() if new_shallow: shallow.update(new_shallow) if new_unshallow: shallow.difference_update(new_unshallow) self._put_named_file("shallow", b"".join([sha + b"\n" for sha in shallow])) def get_peeled(self, ref): """Get the peeled value of a ref. Args: ref: The refname to peel. 
Returns: The fully-peeled SHA1 of a tag object, after peeling all intermediate tags; if the original ref does not point to a tag, this will equal the original SHA1. """ cached = self.refs.get_peeled(ref) if cached is not None: return cached return self.object_store.peel_sha(self.refs[ref]).id def get_walker(self, include=None, *args, **kwargs): """Obtain a walker for this repository. Args: include: Iterable of SHAs of commits to include along with their ancestors. Defaults to [HEAD] exclude: Iterable of SHAs of commits to exclude along with their ancestors, overriding includes. order: ORDER_* constant specifying the order of results. Anything other than ORDER_DATE may result in O(n) memory usage. reverse: If True, reverse the order of output, requiring O(n) memory. max_entries: The maximum number of entries to yield, or None for no limit. paths: Iterable of file or subtree paths to show entries for. rename_detector: diff.RenameDetector object for detecting renames. follow: If True, follow path across renames/copies. Forces a default rename_detector. since: Timestamp to list commits after. until: Timestamp to list commits before. queue_cls: A class to use for a queue of commits, supporting the iterator protocol. The constructor takes a single argument, the Walker. Returns: A `Walker` object """ from dulwich.walk import Walker if include is None: include = [self.head()] if isinstance(include, str): include = [include] kwargs["get_parents"] = lambda commit: self.get_parents(commit.id, commit) return Walker(self.object_store, include, *args, **kwargs) def __getitem__(self, name): """Retrieve a Git object by SHA1 or ref. Args: name: A Git object SHA1 or a ref name Returns: A `ShaFile` object, such as a Commit or Blob Raises: KeyError: when the specified ref or object does not exist """ if not isinstance(name, bytes): raise TypeError( "'name' must be bytestring, not %.80s" % type(name).__name__ ) if len(name) in (20, 40): try: return self.object_store[name] except (KeyError, ValueError): pass try: return self.object_store[self.refs[name]] except RefFormatError: raise KeyError(name) def __contains__(self, name: bytes) -> bool: """Check if a specific Git object or ref is present. Args: name: Git object SHA1 or ref name """ if len(name) == 20 or (len(name) == 40 and valid_hexsha(name)): return name in self.object_store or name in self.refs else: return name in self.refs def __setitem__(self, name: bytes, value: Union[ShaFile, bytes]): """Set a ref. Args: name: ref name value: Ref value - either a ShaFile object, or a hex sha """ if name.startswith(b"refs/") or name == b"HEAD": if isinstance(value, ShaFile): self.refs[name] = value.id elif isinstance(value, bytes): self.refs[name] = value else: raise TypeError(value) else: raise ValueError(name) def __delitem__(self, name: bytes): """Remove a ref. 
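# Sketch: walking recent history via get_walker; assumes the placeholder
# repository has at least one commit on HEAD.
from dulwich.repo import Repo

r = Repo("/tmp/repo")
for entry in r.get_walker(max_entries=5):
    print(entry.commit.id, entry.commit.message.splitlines()[0])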
Args: name: Name of the ref to remove """ if name.startswith(b"refs/") or name == b"HEAD": del self.refs[name] else: raise ValueError(name) def _get_user_identity(self, config: "StackedConfig", kind: str = None) -> bytes: """Determine the identity to use for new commits.""" # TODO(jelmer): Deprecate this function in favor of get_user_identity return get_user_identity(config) def _add_graftpoints(self, updated_graftpoints: Dict[bytes, List[bytes]]): """Add or modify graftpoints Args: updated_graftpoints: Dict of commit shas to list of parent shas """ # Simple validation for commit, parents in updated_graftpoints.items(): for sha in [commit] + parents: check_hexsha(sha, "Invalid graftpoint") self._graftpoints.update(updated_graftpoints) def _remove_graftpoints(self, to_remove: List[bytes] = []) -> None: """Remove graftpoints Args: to_remove: List of commit shas """ for sha in to_remove: del self._graftpoints[sha] def _read_heads(self, name): f = self.get_named_file(name) if f is None: return [] with f: return [line.strip() for line in f.readlines() if line.strip()] - def do_commit( + def do_commit( # noqa: C901 self, message=None, committer=None, author=None, commit_timestamp=None, commit_timezone=None, author_timestamp=None, author_timezone=None, tree=None, encoding=None, ref=b"HEAD", merge_heads=None, no_verify=False, ): """Create a new commit. If not specified, `committer` and `author` default to get_user_identity(..., 'COMMITTER') and get_user_identity(..., 'AUTHOR') respectively. Args: message: Commit message committer: Committer fullname author: Author fullname commit_timestamp: Commit timestamp (defaults to now) commit_timezone: Commit timestamp timezone (defaults to GMT) author_timestamp: Author timestamp (defaults to commit timestamp) author_timezone: Author timestamp timezone (defaults to commit timestamp timezone) tree: SHA1 of the tree root to use (if not specified the current index will be committed). 
encoding: Encoding ref: Optional ref to commit to (defaults to current branch) merge_heads: Merge heads (defaults to .git/MERGE_HEADS) no_verify: Skip pre-commit and commit-msg hooks Returns: New commit SHA1 """ import time c = Commit() if tree is None: index = self.open_index() c.tree = index.commit(self.object_store) else: if len(tree) != 40: raise ValueError("tree must be a 40-byte hex sha string") c.tree = tree try: if not no_verify: self.hooks["pre-commit"].execute() except HookError as e: raise CommitError(e) except KeyError: # no hook defined, silent fallthrough pass config = self.get_config_stack() if merge_heads is None: merge_heads = self._read_heads("MERGE_HEADS") if committer is None: committer = get_user_identity(config, kind="COMMITTER") check_user_identity(committer) c.committer = committer if commit_timestamp is None: # FIXME: Support GIT_COMMITTER_DATE environment variable commit_timestamp = time.time() c.commit_time = int(commit_timestamp) if commit_timezone is None: # FIXME: Use current user timezone rather than UTC commit_timezone = 0 c.commit_timezone = commit_timezone if author is None: author = get_user_identity(config, kind="AUTHOR") c.author = author check_user_identity(author) if author_timestamp is None: # FIXME: Support GIT_AUTHOR_DATE environment variable author_timestamp = commit_timestamp c.author_time = int(author_timestamp) if author_timezone is None: author_timezone = commit_timezone c.author_timezone = author_timezone if encoding is None: try: encoding = config.get(("i18n",), "commitEncoding") except KeyError: pass # No dice if encoding is not None: c.encoding = encoding if message is None: # FIXME: Try to read commit message from .git/MERGE_MSG raise ValueError("No commit message specified") try: if no_verify: c.message = message else: c.message = self.hooks["commit-msg"].execute(message) if c.message is None: c.message = message except HookError as e: raise CommitError(e) except KeyError: # no hook defined, message not modified c.message = message if ref is None: # Create a dangling commit c.parents = merge_heads self.object_store.add_object(c) else: try: old_head = self.refs[ref] c.parents = [old_head] + merge_heads self.object_store.add_object(c) ok = self.refs.set_if_equals( ref, old_head, c.id, message=b"commit: " + message, committer=committer, timestamp=commit_timestamp, timezone=commit_timezone, ) except KeyError: c.parents = merge_heads self.object_store.add_object(c) ok = self.refs.add_if_new( ref, c.id, message=b"commit: " + message, committer=committer, timestamp=commit_timestamp, timezone=commit_timezone, ) if not ok: # Fail if the atomic compare-and-swap failed, leaving the # commit and all its objects as garbage. raise CommitError("%s changed during commit" % (ref,)) self._del_named_file("MERGE_HEADS") try: self.hooks["post-commit"].execute() except HookError as e: # silent failure warnings.warn("post-commit hook failed: %s" % e, UserWarning) except KeyError: # no hook defined, silent fallthrough pass return c.id def read_gitfile(f): """Read a ``.git`` file. The first line of the file should start with "gitdir: " Args: f: File-like object to read from Returns: A path """ cs = f.read() if not cs.startswith("gitdir: "): raise ValueError("Expected file to start with 'gitdir: '") return cs[len("gitdir: ") :].rstrip("\n") class UnsupportedVersion(Exception): """Unsupported repository version.""" def __init__(self, version): self.version = version class Repo(BaseRepo): """A git repository backed by local disk. 
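# Sketch of committing through do_commit; the path, message and identity are
# placeholders, and the index is assumed to be staged already.
from dulwich.repo import Repo

r = Repo("/tmp/repo")
commit_id = r.do_commit(
    message=b"Fix parser edge case",
    committer=b"Jane Doe <jane@example.com>",
)
print(commit_id)  # SHA1 of the new commit on the current branch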
To open an existing repository, call the constructor with the path of the repository. To create a new repository, use the Repo.init class method. """ def __init__(self, root): hidden_path = os.path.join(root, CONTROLDIR) if os.path.isdir(os.path.join(hidden_path, OBJECTDIR)): self.bare = False self._controldir = hidden_path elif os.path.isdir(os.path.join(root, OBJECTDIR)) and os.path.isdir( os.path.join(root, REFSDIR) ): self.bare = True self._controldir = root elif os.path.isfile(hidden_path): self.bare = False with open(hidden_path, "r") as f: path = read_gitfile(f) self._controldir = os.path.join(root, path) else: raise NotGitRepository( "No git repository was found at %(path)s" % dict(path=root) ) commondir = self.get_named_file(COMMONDIR) if commondir is not None: with commondir: self._commondir = os.path.join( - self.controldir(), os.fsdecode(commondir.read().rstrip(b"\r\n")) + self.controldir(), + os.fsdecode(commondir.read().rstrip(b"\r\n")), ) else: self._commondir = self._controldir self.path = root config = self.get_config() try: format_version = int(config.get("core", "repositoryformatversion")) except KeyError: format_version = 0 if format_version != 0: raise UnsupportedVersion(format_version) object_store = DiskObjectStore.from_config( os.path.join(self.commondir(), OBJECTDIR), config ) refs = DiskRefsContainer( self.commondir(), self._controldir, logger=self._write_reflog ) BaseRepo.__init__(self, object_store, refs) self._graftpoints = {} graft_file = self.get_named_file( os.path.join("info", "grafts"), basedir=self.commondir() ) if graft_file: with graft_file: self._graftpoints.update(parse_graftpoints(graft_file)) graft_file = self.get_named_file("shallow", basedir=self.commondir()) if graft_file: with graft_file: self._graftpoints.update(parse_graftpoints(graft_file)) self.hooks["pre-commit"] = PreCommitShellHook(self.controldir()) self.hooks["commit-msg"] = CommitMsgShellHook(self.controldir()) self.hooks["post-commit"] = PostCommitShellHook(self.controldir()) self.hooks["post-receive"] = PostReceiveShellHook(self.controldir()) def _write_reflog( self, ref, old_sha, new_sha, committer, timestamp, timezone, message ): from .reflog import format_reflog_line path = os.path.join(self.controldir(), "logs", os.fsdecode(ref)) try: os.makedirs(os.path.dirname(path)) except FileExistsError: pass if committer is None: config = self.get_config_stack() committer = self._get_user_identity(config) check_user_identity(committer) if timestamp is None: timestamp = int(time.time()) if timezone is None: timezone = 0 # FIXME with open(path, "ab") as f: f.write( format_reflog_line( old_sha, new_sha, committer, timestamp, timezone, message ) + b"\n" ) @classmethod def discover(cls, start="."): """Iterate parent directories to discover a repository Return a Repo object for the first parent directory that looks like a Git repository. Args: start: The directory to start discovery from (defaults to '.') """ remaining = True path = os.path.abspath(start) while remaining: try: return cls(path) except NotGitRepository: path, remaining = os.path.split(path) raise NotGitRepository( "No git repository was found at %(path)s" % dict(path=start) ) def controldir(self): """Return the path of the control directory.""" return self._controldir def commondir(self): """Return the path of the common directory. For a main working tree, it is identical to controldir().
For a linked working tree, it is the control directory of the main working tree.""" return self._commondir def _determine_file_mode(self): """Probe the file-system to determine whether permissions can be trusted. Returns: True if permissions can be trusted, False otherwise. """ fname = os.path.join(self.path, ".probe-permissions") with open(fname, "w") as f: f.write("") st1 = os.lstat(fname) try: os.chmod(fname, st1.st_mode ^ stat.S_IXUSR) except PermissionError: return False st2 = os.lstat(fname) os.unlink(fname) mode_differs = st1.st_mode != st2.st_mode st2_has_exec = (st2.st_mode & stat.S_IXUSR) != 0 return mode_differs and st2_has_exec def _put_named_file(self, path, contents): """Write a file to the control dir with the given name and contents. Args: path: The path to the file, relative to the control dir. contents: A string to write to the file. """ path = path.lstrip(os.path.sep) with GitFile(os.path.join(self.controldir(), path), "wb") as f: f.write(contents) def _del_named_file(self, path): try: os.unlink(os.path.join(self.controldir(), path)) except FileNotFoundError: return def get_named_file(self, path, basedir=None): """Get a file from the control dir with a specific name. Although the filename should be interpreted as a filename relative to the control dir in a disk-based Repo, the object returned need not be pointing to a file in that location. Args: path: The path to the file, relative to the control dir. basedir: Optional argument that specifies an alternative to the control dir. Returns: An open file object, or None if the file does not exist. """ # TODO(dborowitz): sanitize filenames, since this is used directly by # the dumb web serving code. if basedir is None: basedir = self.controldir() path = path.lstrip(os.path.sep) try: return open(os.path.join(basedir, path), "rb") except FileNotFoundError: return None def index_path(self): """Return path to the index file.""" return os.path.join(self.controldir(), INDEX_FILENAME) def open_index(self) -> "Index": """Open the index for this repository. Raises: NoIndexPresent: If no index is present Returns: The matching `Index` """ from dulwich.index import Index if not self.has_index(): raise NoIndexPresent() return Index(self.index_path()) def has_index(self): """Check if an index is present.""" # Bare repos must never have index files; non-bare repos may have a # missing index file, which is treated as empty. return not self.bare def stage(self, fs_paths): """Stage a set of paths. 
Args: fs_paths: List of paths, relative to the repository path """ root_path_bytes = os.fsencode(self.path) if not isinstance(fs_paths, list): fs_paths = [fs_paths] from dulwich.index import ( blob_from_path_and_stat, index_entry_from_stat, _fs_to_tree_path, ) index = self.open_index() blob_normalizer = self.get_blob_normalizer() for fs_path in fs_paths: if not isinstance(fs_path, bytes): fs_path = os.fsencode(fs_path) if os.path.isabs(fs_path): raise ValueError( "path %r should be relative to " "repository root, not absolute" % fs_path ) tree_path = _fs_to_tree_path(fs_path) full_path = os.path.join(root_path_bytes, fs_path) try: st = os.lstat(full_path) except OSError: # File no longer exists try: del index[tree_path] except KeyError: pass # already removed else: if not stat.S_ISREG(st.st_mode) and not stat.S_ISLNK(st.st_mode): try: del index[tree_path] except KeyError: pass else: blob = blob_from_path_and_stat(full_path, st) blob = blob_normalizer.checkin_normalize(blob, fs_path) self.object_store.add_object(blob) index[tree_path] = index_entry_from_stat(st, blob.id, 0) index.write() def clone( - self, target_path, mkdir=True, bare=False, origin=b"origin", checkout=None + self, + target_path, + mkdir=True, + bare=False, + origin=b"origin", + checkout=None, ): """Clone this repository. Args: target_path: Target path mkdir: Create the target directory bare: Whether to create a bare repository origin: Base name for refs in target repository cloned from this repository Returns: Created repository as `Repo` """ if not bare: target = self.init(target_path, mkdir=mkdir) else: if checkout: raise ValueError("checkout and bare are incompatible") target = self.init_bare(target_path, mkdir=mkdir) self.fetch(target) encoded_path = self.path if not isinstance(encoded_path, bytes): encoded_path = os.fsencode(encoded_path) ref_message = b"clone: from " + encoded_path target.refs.import_refs( b"refs/remotes/" + origin, self.refs.as_dict(b"refs/heads"), message=ref_message, ) target.refs.import_refs( b"refs/tags", self.refs.as_dict(b"refs/tags"), message=ref_message ) try: target.refs.add_if_new( DEFAULT_REF, self.refs[DEFAULT_REF], message=ref_message ) except KeyError: pass target_config = target.get_config() target_config.set(("remote", "origin"), "url", encoded_path) target_config.set( - ("remote", "origin"), "fetch", "+refs/heads/*:refs/remotes/origin/*" + ("remote", "origin"), + "fetch", + "+refs/heads/*:refs/remotes/origin/*", ) target_config.write_to_path() # Update target head head_chain, head_sha = self.refs.follow(b"HEAD") if head_chain and head_sha is not None: target.refs.set_symbolic_ref(b"HEAD", head_chain[-1], message=ref_message) target[b"HEAD"] = head_sha if checkout is None: checkout = not bare if checkout: # Checkout HEAD to target dir target.reset_index() return target def reset_index(self, tree=None): """Reset the index back to a specific tree. Args: tree: Tree SHA to reset to, None for current HEAD tree. 
""" from dulwich.index import ( build_index_from_tree, validate_path_element_default, validate_path_element_ntfs, ) if tree is None: tree = self[b"HEAD"].tree config = self.get_config() honor_filemode = config.get_boolean(b"core", b"filemode", os.name != "nt") if config.get_boolean(b"core", b"core.protectNTFS", os.name == "nt"): validate_path_element = validate_path_element_ntfs else: validate_path_element = validate_path_element_default return build_index_from_tree( self.path, self.index_path(), self.object_store, tree, honor_filemode=honor_filemode, validate_path_element=validate_path_element, ) def get_config(self) -> "ConfigFile": """Retrieve the config object. Returns: `ConfigFile` object for the ``.git/config`` file. """ from dulwich.config import ConfigFile path = os.path.join(self._controldir, "config") try: return ConfigFile.from_path(path) except FileNotFoundError: ret = ConfigFile() ret.path = path return ret def get_description(self): """Retrieve the description of this repository. Returns: A string describing the repository or None. """ path = os.path.join(self._controldir, "description") try: with GitFile(path, "rb") as f: return f.read() except FileNotFoundError: return None def __repr__(self): return "" % self.path def set_description(self, description): """Set the description for this repository. Args: description: Text to set as description for this repository. """ self._put_named_file("description", description) @classmethod def _init_maybe_bare(cls, path, bare): for d in BASE_DIRECTORIES: os.mkdir(os.path.join(path, *d)) DiskObjectStore.init(os.path.join(path, OBJECTDIR)) ret = cls(path) ret.refs.set_symbolic_ref(b"HEAD", DEFAULT_REF) ret._init_files(bare) return ret @classmethod def init(cls, path, mkdir=False): """Create a new repository. Args: path: Path in which to create the repository mkdir: Whether to create the directory Returns: `Repo` instance """ if mkdir: os.mkdir(path) controldir = os.path.join(path, CONTROLDIR) os.mkdir(controldir) _set_filesystem_hidden(controldir) cls._init_maybe_bare(controldir, False) return cls(path) @classmethod def _init_new_working_directory(cls, path, main_repo, identifier=None, mkdir=False): """Create a new working directory linked to a repository. Args: path: Path in which to create the working tree. main_repo: Main repository to reference identifier: Worktree identifier mkdir: Whether to create the directory Returns: `Repo` instance """ if mkdir: os.mkdir(path) if identifier is None: identifier = os.path.basename(path) main_worktreesdir = os.path.join(main_repo.controldir(), WORKTREES) worktree_controldir = os.path.join(main_worktreesdir, identifier) gitdirfile = os.path.join(path, CONTROLDIR) with open(gitdirfile, "wb") as f: f.write(b"gitdir: " + os.fsencode(worktree_controldir) + b"\n") try: os.mkdir(main_worktreesdir) except FileExistsError: pass try: os.mkdir(worktree_controldir) except FileExistsError: pass with open(os.path.join(worktree_controldir, GITDIR), "wb") as f: f.write(os.fsencode(gitdirfile) + b"\n") with open(os.path.join(worktree_controldir, COMMONDIR), "wb") as f: f.write(b"../..\n") with open(os.path.join(worktree_controldir, "HEAD"), "wb") as f: f.write(main_repo.head() + b"\n") r = cls(path) r.reset_index() return r @classmethod def init_bare(cls, path, mkdir=False): """Create a new bare repository. ``path`` should already exist and be an empty directory. 
@classmethod def init_bare(cls, path, mkdir=False): """Create a new bare repository. ``path`` should already exist and be an empty directory. Args: path: Path to create bare repository in Returns: a `Repo` instance """ if mkdir: os.mkdir(path) return cls._init_maybe_bare(path, True) create = init_bare def close(self): """Close any files opened by this repository.""" self.object_store.close() def __enter__(self): return self def __exit__(self, exc_type, exc_val, exc_tb): self.close() def get_blob_normalizer(self): """Return a BlobNormalizer object""" # TODO Parse the git attributes files git_attributes = {} return BlobNormalizer(self.get_config_stack(), git_attributes) class MemoryRepo(BaseRepo): """Repo that stores refs, objects, and named files in memory. MemoryRepos are always bare: they have no working tree and no index, since those have a stronger dependency on the filesystem. """ def __init__(self): from dulwich.config import ConfigFile self._reflog = [] refs_container = DictRefsContainer({}, logger=self._append_reflog) BaseRepo.__init__(self, MemoryObjectStore(), refs_container) self._named_files = {} self.bare = True self._config = ConfigFile() self._description = None def _append_reflog(self, *args): self._reflog.append(args) def set_description(self, description): self._description = description def get_description(self): return self._description def _determine_file_mode(self): """Probe the file-system to determine whether permissions can be trusted. Returns: True if permissions can be trusted, False otherwise. """ return sys.platform != "win32" def _put_named_file(self, path, contents): """Write a file to the control dir with the given name and contents. Args: path: The path to the file, relative to the control dir. contents: A string to write to the file. """ self._named_files[path] = contents def _del_named_file(self, path): try: del self._named_files[path] except KeyError: pass def get_named_file(self, path, basedir=None): """Get a file from the control dir with a specific name. Although the filename should be interpreted as a filename relative to the control dir in a disk-based Repo, the object returned need not be pointing to a file in that location. Args: path: The path to the file, relative to the control dir. Returns: An open file object, or None if the file does not exist. """ contents = self._named_files.get(path, None) if contents is None: return None return BytesIO(contents) def open_index(self): """Fail to open index for this repo, since it is bare. Raises: NoIndexPresent: Raised when no index is present """ raise NoIndexPresent() def get_config(self): """Retrieve the config object. Returns: `ConfigFile` object. """ return self._config @classmethod def init_bare(cls, objects, refs): """Create a new bare repository in memory. Args: objects: Objects for the new repository, as iterable refs: Refs as dictionary, mapping names to object SHA1s """ ret = cls() for obj in objects: ret.object_store.add_object(obj) for refname, sha in refs.items(): ret.refs.add_if_new(refname, sha) ret._init_files(bare=True) return ret
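
A small sketch of MemoryRepo, which exercises the same BaseRepo API as the disk-backed Repo without touching the filesystem (the blob contents and identity are illustrative):

from dulwich.objects import Blob, Tree
from dulwich.repo import MemoryRepo

repo = MemoryRepo()
blob = Blob.from_string(b"spam\n")
tree = Tree()
tree.add(b"spam.txt", 0o100644, blob.id)
# Add the objects directly, since a MemoryRepo has no index to commit from.
repo.object_store.add_objects([(blob, None), (tree, None)])
commit_id = repo.do_commit(
    message=b"in-memory commit",
    committer=b"Example User <user@example.com>",
    author=b"Example User <user@example.com>",
    tree=tree.id,
)
assert repo[b"HEAD"].id == commit_id
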
diff --git a/dulwich/server.py b/dulwich/server.py index b58ddd28..206b9c24 100644 --- a/dulwich/server.py +++ b/dulwich/server.py @@ -1,1265 +1,1278 @@ # server.py -- Implementation of the server side git protocols # Copyright (C) 2008 John Carr # Copyright (C) 2011-2012 Jelmer Vernooij # # Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU # General Public License as published by the Free Software Foundation; version 2.0 # or (at your option) any later version. You can redistribute it and/or # modify it under the terms of either of these two licenses. # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # You should have received a copy of the licenses; if not, see # <http://www.gnu.org/licenses/> for a copy of the GNU General Public License # and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache # License, Version 2.0. # """Git smart network protocol server implementation. For more detailed documentation on the network protocol, see the Documentation/technical directory in the cgit distribution, and in particular: * Documentation/technical/protocol-capabilities.txt * Documentation/technical/pack-protocol.txt Currently supported capabilities: * include-tag * thin-pack * multi_ack_detailed * multi_ack * side-band-64k * ofs-delta * no-progress * report-status * delete-refs * shallow * symref """ import collections import os import socket import sys import time from typing import List, Tuple, Dict, Optional, Iterable import zlib import socketserver from dulwich.archive import tar_stream from dulwich.errors import ( ApplyDeltaError, ChecksumMismatch, GitProtocolError, HookError, NotGitRepository, UnexpectedCommandError, ObjectFormatException, ) from dulwich import log_utils from dulwich.objects import ( Commit, valid_hexsha, ) from dulwich.pack import ( write_pack_objects, ) from dulwich.protocol import ( # noqa: F401 BufferedPktLineWriter, capability_agent, CAPABILITIES_REF, CAPABILITY_AGENT, CAPABILITY_DELETE_REFS, CAPABILITY_INCLUDE_TAG, CAPABILITY_MULTI_ACK_DETAILED, CAPABILITY_MULTI_ACK, CAPABILITY_NO_DONE, CAPABILITY_NO_PROGRESS, CAPABILITY_OFS_DELTA, CAPABILITY_QUIET, CAPABILITY_REPORT_STATUS, CAPABILITY_SHALLOW, CAPABILITY_SIDE_BAND_64K, CAPABILITY_THIN_PACK, COMMAND_DEEPEN, COMMAND_DONE, COMMAND_HAVE, COMMAND_SHALLOW, COMMAND_UNSHALLOW, COMMAND_WANT, MULTI_ACK, MULTI_ACK_DETAILED, Protocol, ProtocolFile, ReceivableProtocol, SIDE_BAND_CHANNEL_DATA, SIDE_BAND_CHANNEL_PROGRESS, SIDE_BAND_CHANNEL_FATAL, SINGLE_ACK, TCP_GIT_PORT, ZERO_SHA, ack_type, extract_capabilities, extract_want_line_capabilities, symref_capabilities, ) from dulwich.refs import ( ANNOTATED_TAG_SUFFIX, write_info_refs, ) from dulwich.repo import ( BaseRepo, Repo, ) logger = log_utils.getLogger(__name__) class Backend(object): """A backend for the Git smart server implementation.""" def open_repository(self, path): """Open the repository at a path. Args: path: Path to the repository Raises: NotGitRepository: no git repository was found at path Returns: Instance of BackendRepo """ raise NotImplementedError(self.open_repository) class BackendRepo(object): """Repository abstraction used by the Git server. The methods required here are a subset of those provided by dulwich.repo.Repo. """ object_store = None refs = None def get_refs(self) -> Dict[bytes, bytes]: """ Get all the refs in the repository Returns: dict of name -> sha """ raise NotImplementedError def get_peeled(self, name: bytes) -> Optional[bytes]: """Return the cached peeled value of a ref, if available. Args: name: Name of the ref to peel Returns: The peeled value of the ref. If the ref is known not to point to a tag, this will be the SHA the ref refers to. If no cached information about a tag is available, this method may return None, but it should attempt to peel the tag if possible.
""" return None def fetch_objects(self, determine_wants, graph_walker, progress, get_tagged=None): """ Yield the objects required for a list of commits. Args: progress: is a callback to send progress messages to the client get_tagged: Function that returns a dict of pointed-to sha -> tag sha for including tags. """ raise NotImplementedError class DictBackend(Backend): """Trivial backend that looks up Git repositories in a dictionary.""" def __init__(self, repos): self.repos = repos def open_repository(self, path: str) -> BaseRepo: logger.debug("Opening repository at %s", path) try: return self.repos[path] except KeyError: raise NotGitRepository( "No git repository was found at %(path)s" % dict(path=path) ) class FileSystemBackend(Backend): """Simple backend looking up Git repositories in the local file system.""" def __init__(self, root=os.sep): super(FileSystemBackend, self).__init__() self.root = (os.path.abspath(root) + os.sep).replace(os.sep * 2, os.sep) def open_repository(self, path): logger.debug("opening repository at %s", path) abspath = os.path.abspath(os.path.join(self.root, path)) + os.sep normcase_abspath = os.path.normcase(abspath) normcase_root = os.path.normcase(self.root) if not normcase_abspath.startswith(normcase_root): raise NotGitRepository("Path %r not inside root %r" % (path, self.root)) return Repo(abspath) class Handler(object): """Smart protocol command handler base class.""" def __init__(self, backend, proto, stateless_rpc=None): self.backend = backend self.proto = proto self.stateless_rpc = stateless_rpc def handle(self): raise NotImplementedError(self.handle) class PackHandler(Handler): """Protocol handler for packs.""" def __init__(self, backend, proto, stateless_rpc=None): super(PackHandler, self).__init__(backend, proto, stateless_rpc) self._client_capabilities = None # Flags needed for the no-done capability self._done_received = False @classmethod def capability_line(cls, capabilities): logger.info("Sending capabilities: %s", capabilities) return b"".join([b" " + c for c in capabilities]) @classmethod def capabilities(cls) -> Iterable[bytes]: raise NotImplementedError(cls.capabilities) @classmethod def innocuous_capabilities(cls) -> Iterable[bytes]: return [ CAPABILITY_INCLUDE_TAG, CAPABILITY_THIN_PACK, CAPABILITY_NO_PROGRESS, CAPABILITY_OFS_DELTA, capability_agent(), ] @classmethod def required_capabilities(cls) -> Iterable[bytes]: """Return a list of capabilities that we require the client to have.""" return [] def set_client_capabilities(self, caps: Iterable[bytes]) -> None: allowable_caps = set(self.innocuous_capabilities()) allowable_caps.update(self.capabilities()) for cap in caps: if cap.startswith(CAPABILITY_AGENT + b"="): continue if cap not in allowable_caps: raise GitProtocolError( "Client asked for capability %r that " "was not advertised." % cap ) for cap in self.required_capabilities(): if cap not in caps: raise GitProtocolError( "Client does not support required " "capability %r." 
% cap ) self._client_capabilities = set(caps) logger.info("Client capabilities: %s", caps) def has_capability(self, cap: bytes) -> bool: if self._client_capabilities is None: raise GitProtocolError( "Server attempted to access capability %r " "before asking client" % cap ) return cap in self._client_capabilities def notify_done(self) -> None: self._done_received = True class UploadPackHandler(PackHandler): """Protocol handler for uploading a pack to the client.""" def __init__(self, backend, args, proto, stateless_rpc=None, advertise_refs=False): super(UploadPackHandler, self).__init__( backend, proto, stateless_rpc=stateless_rpc ) self.repo = backend.open_repository(args[0]) self._graph_walker = None self.advertise_refs = advertise_refs # A state variable for denoting that the have list is still # being processed, and the client is not accepting any other # data (such as side-band, see the progress method here). self._processing_have_lines = False @classmethod def capabilities(cls): return [ CAPABILITY_MULTI_ACK_DETAILED, CAPABILITY_MULTI_ACK, CAPABILITY_SIDE_BAND_64K, CAPABILITY_THIN_PACK, CAPABILITY_OFS_DELTA, CAPABILITY_NO_PROGRESS, CAPABILITY_INCLUDE_TAG, CAPABILITY_SHALLOW, CAPABILITY_NO_DONE, ] @classmethod def required_capabilities(cls): - return (CAPABILITY_SIDE_BAND_64K, CAPABILITY_THIN_PACK, CAPABILITY_OFS_DELTA) + return ( + CAPABILITY_SIDE_BAND_64K, + CAPABILITY_THIN_PACK, + CAPABILITY_OFS_DELTA, + ) def progress(self, message): if self.has_capability(CAPABILITY_NO_PROGRESS) or self._processing_have_lines: return self.proto.write_sideband(SIDE_BAND_CHANNEL_PROGRESS, message) def get_tagged(self, refs=None, repo=None): """Get a dict of peeled values of tags to their original tag shas. Args: refs: dict of refname -> sha of possible tags; defaults to all of the backend's refs. repo: optional Repo instance for getting peeled refs; defaults to the backend's repo, if available Returns: dict of peeled_sha -> tag_sha, where tag_sha is the sha of a tag whose peeled value is peeled_sha. """ if not self.has_capability(CAPABILITY_INCLUDE_TAG): return {} if refs is None: refs = self.repo.get_refs() if repo is None: repo = getattr(self.repo, "repo", None) if repo is None: # Bail if we don't have a Repo available; this is ok since # clients must be able to handle if the server doesn't include # all relevant tags. # TODO: fix behavior when missing return {} # TODO(jelmer): Integrate this with the refs logic in # Repo.fetch_objects tagged = {} for name, sha in refs.items(): peeled_sha = repo.get_peeled(name) if peeled_sha != sha: tagged[peeled_sha] = sha return tagged def handle(self): def write(x): return self.proto.write_sideband(SIDE_BAND_CHANNEL_DATA, x) graph_walker = _ProtocolGraphWalker( self, self.repo.object_store, self.repo.get_peeled, self.repo.refs.get_symrefs, ) wants = [] def wants_wrapper(refs): wants.extend(graph_walker.determine_wants(refs)) return wants objects_iter = self.repo.fetch_objects( - wants_wrapper, graph_walker, self.progress, get_tagged=self.get_tagged + wants_wrapper, + graph_walker, + self.progress, + get_tagged=self.get_tagged, ) # Note that the client is only processing responses related # to the have lines it sent, and any other data (including side- # band) will be considered a fatal error. self._processing_have_lines = True # Did the process short-circuit (e.g. in a stateless RPC call)? Note # that the client still expects a 0-object pack in most cases.
# Also, if it also happens that the object_iter is instantiated # with a graph walker with an implementation that talks over the # wire (which is this instance of this class) this will actually # iterate through everything and write things out to the wire. if len(wants) == 0: return # The provided haves are processed, and it is safe to send side- # band data now. self._processing_have_lines = False if not graph_walker.handle_done( not self.has_capability(CAPABILITY_NO_DONE), self._done_received ): return self.progress( ("counting objects: %d, done.\n" % len(objects_iter)).encode("ascii") ) write_pack_objects(ProtocolFile(None, write), objects_iter) # we are done self.proto.write_pkt_line(None) def _split_proto_line(line, allowed): """Split a line read from the wire. Args: line: The line read from the wire. allowed: An iterable of command names that should be allowed. Command names not listed below as possible return values will be ignored. If None, any commands from the possible return values are allowed. Returns: a tuple having one of the following forms: ('want', obj_id) ('have', obj_id) ('done', None) (None, None) (for a flush-pkt) Raises: UnexpectedCommandError: if the line cannot be parsed into one of the allowed return values. """ if not line: fields = [None] else: fields = line.rstrip(b"\n").split(b" ", 1) command = fields[0] if allowed is not None and command not in allowed: raise UnexpectedCommandError(command) if len(fields) == 1 and command in (COMMAND_DONE, None): return (command, None) elif len(fields) == 2: - if command in (COMMAND_WANT, COMMAND_HAVE, COMMAND_SHALLOW, COMMAND_UNSHALLOW): + if command in ( + COMMAND_WANT, + COMMAND_HAVE, + COMMAND_SHALLOW, + COMMAND_UNSHALLOW, + ): if not valid_hexsha(fields[1]): raise GitProtocolError("Invalid sha") return tuple(fields) elif command == COMMAND_DEEPEN: return command, int(fields[1]) raise GitProtocolError("Received invalid line from client: %r" % line) def _find_shallow(store, heads, depth): """Find shallow commits according to a given depth. Args: store: An ObjectStore for looking up objects. heads: Iterable of head SHAs to start walking from. depth: The depth of ancestors to include. A depth of one includes only the heads themselves. Returns: A tuple of (shallow, not_shallow), sets of SHAs that should be considered shallow and unshallow according to the arguments. Note that these sets may overlap if a commit is reachable along multiple paths. 
""" parents = {} def get_parents(sha): result = parents.get(sha, None) if not result: result = store[sha].parents parents[sha] = result return result todo = [] # stack of (sha, depth) for head_sha in heads: obj = store.peel_sha(head_sha) if isinstance(obj, Commit): todo.append((obj.id, 1)) not_shallow = set() shallow = set() while todo: sha, cur_depth = todo.pop() if cur_depth < depth: not_shallow.add(sha) new_depth = cur_depth + 1 todo.extend((p, new_depth) for p in get_parents(sha)) else: shallow.add(sha) return shallow, not_shallow def _want_satisfied(store, haves, want, earliest): o = store[want] pending = collections.deque([o]) known = set([want]) while pending: commit = pending.popleft() if commit.id in haves: return True if commit.type_name != b"commit": # non-commit wants are assumed to be satisfied continue for parent in commit.parents: if parent in known: continue known.add(parent) parent_obj = store[parent] # TODO: handle parents with later commit times than children if parent_obj.commit_time >= earliest: pending.append(parent_obj) return False def _all_wants_satisfied(store, haves, wants): """Check whether all the current wants are satisfied by a set of haves. Args: store: Object store to retrieve objects from haves: A set of commits we know the client has. wants: A set of commits the client wants Note: Wants are specified with set_wants rather than passed in since in the current interface they are determined outside this class. """ haves = set(haves) if haves: earliest = min([store[h].commit_time for h in haves]) else: earliest = 0 for want in wants: if not _want_satisfied(store, haves, want, earliest): return False return True class _ProtocolGraphWalker(object): """A graph walker that knows the git protocol. As a graph walker, this class implements ack(), next(), and reset(). It also contains some base methods for interacting with the wire and walking the commit tree. The work of determining which acks to send is passed on to the implementation instance stored in _impl. The reason for this is that we do not know at object creation time what ack level the protocol requires. A call to set_ack_type() is required to set up the implementation, before any calls to next() or ack() are made. """ def __init__(self, handler, object_store, get_peeled, get_symrefs): self.handler = handler self.store = object_store self.get_peeled = get_peeled self.get_symrefs = get_symrefs self.proto = handler.proto self.stateless_rpc = handler.stateless_rpc self.advertise_refs = handler.advertise_refs self._wants = [] self.shallow = set() self.client_shallow = set() self.unshallow = set() self._cached = False self._cache = [] self._cache_index = 0 self._impl = None def determine_wants(self, heads): """Determine the wants for a set of heads. The given heads are advertised to the client, who then specifies which refs they want using 'want' lines. This portion of the protocol is the same regardless of ack type, and in fact is used to set the ack type of the ProtocolGraphWalker. If the client has the 'shallow' capability, this method also reads and responds to the 'shallow' and 'deepen' lines from the client. These are not part of the wants per se, but they set up necessary state for walking the graph. Additionally, later code depends on this method consuming everything up to the first 'have' line. 
Args: heads: a dict of refname->SHA1 to advertise Returns: a list of SHA1s requested by the client """ symrefs = self.get_symrefs() values = set(heads.values()) if self.advertise_refs or not self.stateless_rpc: for i, (ref, sha) in enumerate(sorted(heads.items())): try: peeled_sha = self.get_peeled(ref) except KeyError: # Skip refs that are inaccessible # TODO(jelmer): Integrate with Repo.fetch_objects refs # logic. continue line = sha + b" " + ref if not i: line += b"\x00" + self.handler.capability_line( self.handler.capabilities() + symref_capabilities(symrefs.items()) ) self.proto.write_pkt_line(line + b"\n") if peeled_sha != sha: self.proto.write_pkt_line( peeled_sha + b" " + ref + ANNOTATED_TAG_SUFFIX + b"\n" ) # done advertising refs self.proto.write_pkt_line(None) if self.advertise_refs: return [] # Now the client will send a series of 'want' commands want = self.proto.read_pkt_line() if not want: return [] line, caps = extract_want_line_capabilities(want) self.handler.set_client_capabilities(caps) self.set_ack_type(ack_type(caps)) allowed = (COMMAND_WANT, COMMAND_SHALLOW, COMMAND_DEEPEN, None) command, sha = _split_proto_line(line, allowed) want_revs = [] while command == COMMAND_WANT: if sha not in values: raise GitProtocolError("Client wants invalid object %s" % sha) want_revs.append(sha) command, sha = self.read_proto_line(allowed) self.set_wants(want_revs) if command in (COMMAND_SHALLOW, COMMAND_DEEPEN): self.unread_proto_line(command, sha) self._handle_shallow_request(want_revs) if self.stateless_rpc and self.proto.eof(): # The client may close the socket at this point, expecting a # flush-pkt from the server. We might be ready to send a packfile # at this point, so we need to explicitly short-circuit in this # case. return [] return want_revs def unread_proto_line(self, command, value): if isinstance(value, int): value = str(value).encode("ascii") self.proto.unread_pkt_line(command + b" " + value) def ack(self, have_ref): if len(have_ref) != 40: raise ValueError("invalid sha %r" % have_ref) return self._impl.ack(have_ref) def reset(self): self._cached = True self._cache_index = 0 def next(self): if not self._cached: if not self._impl and self.stateless_rpc: return None return next(self._impl) self._cache_index += 1 if self._cache_index > len(self._cache): return None return self._cache[self._cache_index] __next__ = next def read_proto_line(self, allowed): """Read a line from the wire. Args: allowed: An iterable of command names that should be allowed. Returns: A tuple of (command, value); see _split_proto_line. Raises: UnexpectedCommandError: If an error occurred reading the line. """ return _split_proto_line(self.proto.read_pkt_line(), allowed) def _handle_shallow_request(self, wants): while True: command, val = self.read_proto_line((COMMAND_DEEPEN, COMMAND_SHALLOW)) if command == COMMAND_DEEPEN: depth = val break self.client_shallow.add(val) self.read_proto_line((None,)) # consume client's flush-pkt shallow, not_shallow = _find_shallow(self.store, wants, depth) # Update self.shallow instead of reassigning it since we passed a # reference to it before this method was called.
self.shallow.update(shallow - not_shallow) new_shallow = self.shallow - self.client_shallow unshallow = self.unshallow = not_shallow & self.client_shallow for sha in sorted(new_shallow): self.proto.write_pkt_line(COMMAND_SHALLOW + b" " + sha) for sha in sorted(unshallow): self.proto.write_pkt_line(COMMAND_UNSHALLOW + b" " + sha) self.proto.write_pkt_line(None) def notify_done(self): # relay the message down to the handler. self.handler.notify_done() def send_ack(self, sha, ack_type=b""): if ack_type: ack_type = b" " + ack_type self.proto.write_pkt_line(b"ACK " + sha + ack_type + b"\n") def send_nak(self): self.proto.write_pkt_line(b"NAK\n") def handle_done(self, done_required, done_received): # Delegate this to the implementation. return self._impl.handle_done(done_required, done_received) def set_wants(self, wants): self._wants = wants def all_wants_satisfied(self, haves): """Check whether all the current wants are satisfied by a set of haves. Args: haves: A set of commits we know the client has. Note: Wants are specified with set_wants rather than passed in since in the current interface they are determined outside this class. """ return _all_wants_satisfied(self.store, haves, self._wants) def set_ack_type(self, ack_type): impl_classes = { MULTI_ACK: MultiAckGraphWalkerImpl, MULTI_ACK_DETAILED: MultiAckDetailedGraphWalkerImpl, SINGLE_ACK: SingleAckGraphWalkerImpl, } self._impl = impl_classes[ack_type](self) _GRAPH_WALKER_COMMANDS = (COMMAND_HAVE, COMMAND_DONE, None) class SingleAckGraphWalkerImpl(object): """Graph walker implementation that speaks the single-ack protocol.""" def __init__(self, walker): self.walker = walker self._common = [] def ack(self, have_ref): if not self._common: self.walker.send_ack(have_ref) self._common.append(have_ref) def next(self): command, sha = self.walker.read_proto_line(_GRAPH_WALKER_COMMANDS) if command in (None, COMMAND_DONE): # defer the handling of done self.walker.notify_done() return None elif command == COMMAND_HAVE: return sha __next__ = next def handle_done(self, done_required, done_received): if not self._common: self.walker.send_nak() if done_required and not done_received: # we are not done, especially when done is required; skip # the pack for this request and especially do not handle # the done. return False if not done_received and not self._common: # Okay we are not actually done then since the walker picked # up no haves. This is usually triggered when client attempts # to pull from a source that has no common base_commit. 
# See: test_server.MultiAckDetailedGraphWalkerImplTestCase.\ # test_multi_ack_stateless_nodone return False return True class MultiAckGraphWalkerImpl(object): """Graph walker implementation that speaks the multi-ack protocol.""" def __init__(self, walker): self.walker = walker self._found_base = False self._common = [] def ack(self, have_ref): self._common.append(have_ref) if not self._found_base: self.walker.send_ack(have_ref, b"continue") if self.walker.all_wants_satisfied(self._common): self._found_base = True # else we blind ack within next def next(self): while True: command, sha = self.walker.read_proto_line(_GRAPH_WALKER_COMMANDS) if command is None: self.walker.send_nak() # in multi-ack mode, a flush-pkt indicates the client wants to # flush but more have lines are still coming continue elif command == COMMAND_DONE: self.walker.notify_done() return None elif command == COMMAND_HAVE: if self._found_base: # blind ack self.walker.send_ack(sha, b"continue") return sha __next__ = next def handle_done(self, done_required, done_received): if done_required and not done_received: # we are not done, especially when done is required; skip # the pack for this request and especially do not handle # the done. return False if not done_received and not self._common: # Okay we are not actually done then since the walker picked # up no haves. This is usually triggered when client attempts # to pull from a source that has no common base_commit. # See: test_server.MultiAckDetailedGraphWalkerImplTestCase.\ # test_multi_ack_stateless_nodone return False # don't nak unless no common commits were found, even if not # everything is satisfied if self._common: self.walker.send_ack(self._common[-1]) else: self.walker.send_nak() return True class MultiAckDetailedGraphWalkerImpl(object): """Graph walker implementation speaking the multi-ack-detailed protocol.""" def __init__(self, walker): self.walker = walker self._common = [] def ack(self, have_ref): # Should only be called iff have_ref is common self._common.append(have_ref) self.walker.send_ack(have_ref, b"common") def next(self): while True: command, sha = self.walker.read_proto_line(_GRAPH_WALKER_COMMANDS) if command is None: if self.walker.all_wants_satisfied(self._common): self.walker.send_ack(self._common[-1], b"ready") self.walker.send_nak() if self.walker.stateless_rpc: # The HTTP version of this request a flush-pkt always # signifies an end of request, so we also return # nothing here as if we are done (but not really, as # it depends on whether no-done capability was # specified and that's handled in handle_done which # may or may not call post_nodone_check depending on # that). return None elif command == COMMAND_DONE: # Let the walker know that we got a done. self.walker.notify_done() break elif command == COMMAND_HAVE: # return the sha and let the caller ACK it with the # above ack method. return sha # don't nak unless no common commits were found, even if not # everything is satisfied __next__ = next def handle_done(self, done_required, done_received): if done_required and not done_received: # we are not done, especially when done is required; skip # the pack for this request and especially do not handle # the done. return False if not done_received and not self._common: # Okay we are not actually done then since the walker picked # up no haves. This is usually triggered when client attempts # to pull from a source that has no common base_commit. 
# See: test_server.MultiAckDetailedGraphWalkerImplTestCase.\ # test_multi_ack_stateless_nodone return False # don't nak unless no common commits were found, even if not # everything is satisfied if self._common: self.walker.send_ack(self._common[-1]) else: self.walker.send_nak() return True class ReceivePackHandler(PackHandler): """Protocol handler for downloading a pack from the client.""" def __init__(self, backend, args, proto, stateless_rpc=None, advertise_refs=False): super(ReceivePackHandler, self).__init__( backend, proto, stateless_rpc=stateless_rpc ) self.repo = backend.open_repository(args[0]) self.advertise_refs = advertise_refs @classmethod def capabilities(cls) -> Iterable[bytes]: return [ CAPABILITY_REPORT_STATUS, CAPABILITY_DELETE_REFS, CAPABILITY_QUIET, CAPABILITY_OFS_DELTA, CAPABILITY_SIDE_BAND_64K, CAPABILITY_NO_DONE, ] def _apply_pack( self, refs: List[Tuple[bytes, bytes, bytes]] ) -> List[Tuple[bytes, bytes]]: all_exceptions = ( IOError, OSError, ChecksumMismatch, ApplyDeltaError, AssertionError, socket.error, zlib.error, ObjectFormatException, ) status = [] will_send_pack = False for command in refs: if command[1] != ZERO_SHA: will_send_pack = True if will_send_pack: # TODO: more informative error messages than just the exception # string try: recv = getattr(self.proto, "recv", None) self.repo.object_store.add_thin_pack(self.proto.read, recv) status.append((b"unpack", b"ok")) except all_exceptions as e: status.append((b"unpack", str(e).replace("\n", "").encode("utf-8"))) # The pack may still have been moved in, but it may contain # broken objects. We trust a later GC to clean it up. else: # The git protocol want to find a status entry related to unpack # process even if no pack data has been sent. status.append((b"unpack", b"ok")) for oldsha, sha, ref in refs: ref_status = b"ok" try: if sha == ZERO_SHA: if CAPABILITY_DELETE_REFS not in self.capabilities(): raise GitProtocolError( "Attempted to delete refs without delete-refs " "capability." 
) try: self.repo.refs.remove_if_equals(ref, oldsha) except all_exceptions: ref_status = b"failed to delete" else: try: self.repo.refs.set_if_equals(ref, oldsha, sha) except all_exceptions: ref_status = b"failed to write" except KeyError: ref_status = b"bad ref" status.append((ref, ref_status)) return status def _report_status(self, status: List[Tuple[bytes, bytes]]) -> None: if self.has_capability(CAPABILITY_SIDE_BAND_64K): writer = BufferedPktLineWriter( lambda d: self.proto.write_sideband(SIDE_BAND_CHANNEL_DATA, d) ) write = writer.write def flush(): writer.flush() self.proto.write_pkt_line(None) else: write = self.proto.write_pkt_line def flush(): pass for name, msg in status: if name == b"unpack": write(b"unpack " + msg + b"\n") elif msg == b"ok": write(b"ok " + name + b"\n") else: write(b"ng " + name + b" " + msg + b"\n") write(None) flush() def _on_post_receive(self, client_refs): hook = self.repo.hooks.get("post-receive", None) if not hook: return try: output = hook.execute(client_refs) if output: self.proto.write_sideband(SIDE_BAND_CHANNEL_PROGRESS, output) except HookError as err: self.proto.write_sideband(SIDE_BAND_CHANNEL_FATAL, repr(err)) def handle(self) -> None: if self.advertise_refs or not self.stateless_rpc: refs = sorted(self.repo.get_refs().items()) symrefs = sorted(self.repo.refs.get_symrefs().items()) if not refs: refs = [(CAPABILITIES_REF, ZERO_SHA)] self.proto.write_pkt_line( refs[0][1] + b" " + refs[0][0] + b"\0" + self.capability_line( self.capabilities() + symref_capabilities(symrefs) ) + b"\n" ) for i in range(1, len(refs)): ref = refs[i] self.proto.write_pkt_line(ref[1] + b" " + ref[0] + b"\n") self.proto.write_pkt_line(None) if self.advertise_refs: return client_refs = [] ref = self.proto.read_pkt_line() # if ref is None then the client doesn't want to send us anything if ref is None: return ref, caps = extract_capabilities(ref) self.set_client_capabilities(caps) # client will now send us a list of (oldsha, newsha, ref) while ref: client_refs.append(ref.split()) ref = self.proto.read_pkt_line() # backend can now deal with these refs and read a pack using self.read status = self._apply_pack(client_refs) self._on_post_receive(client_refs) # when we have read the whole pack from the client, send a status report # if the client asked for it if self.has_capability(CAPABILITY_REPORT_STATUS): self._report_status(status) class UploadArchiveHandler(Handler): def __init__(self, backend, args, proto, stateless_rpc=None): super(UploadArchiveHandler, self).__init__(backend, proto, stateless_rpc) self.repo = backend.open_repository(args[0]) def handle(self): def write(x): return self.proto.write_sideband(SIDE_BAND_CHANNEL_DATA, x) arguments = [] for pkt in self.proto.read_pkt_seq(): (key, value) = pkt.split(b" ", 1) if key != b"argument": raise GitProtocolError("unknown command %s" % key) arguments.append(value.rstrip(b"\n")) prefix = b"" format = "tar" i = 0 store = self.repo.object_store while i < len(arguments): argument = arguments[i] if argument == b"--prefix": i += 1 prefix = arguments[i] elif argument == b"--format": i += 1 format = arguments[i].decode("ascii") else: commit_sha = self.repo.refs[argument] tree = store[store[commit_sha].tree] i += 1 self.proto.write_pkt_line(b"ACK") self.proto.write_pkt_line(None) for chunk in tar_stream( store, tree, mtime=time.time(), prefix=prefix, format=format ): write(chunk) self.proto.write_pkt_line(None)
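
The status tuples produced by _apply_pack map directly onto the report-status lines emitted by _report_status. A rough rendering of that mapping (render_status is a hypothetical helper written for illustration, not part of the module):

def render_status(status):
    # Mirrors the formatting logic in _report_status above.
    lines = []
    for name, msg in status:
        if name == b"unpack":
            lines.append(b"unpack " + msg + b"\n")
        elif msg == b"ok":
            lines.append(b"ok " + name + b"\n")
        else:
            lines.append(b"ng " + name + b" " + msg + b"\n")
    return lines

assert render_status(
    [
        (b"unpack", b"ok"),
        (b"refs/heads/master", b"ok"),
        (b"refs/heads/gone", b"failed to delete"),
    ]
) == [b"unpack ok\n", b"ok refs/heads/master\n", b"ng refs/heads/gone failed to delete\n"]
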
# Default handler classes for git services. DEFAULT_HANDLERS = { b"git-upload-pack": UploadPackHandler, b"git-receive-pack": ReceivePackHandler, b"git-upload-archive": UploadArchiveHandler, } class TCPGitRequestHandler(socketserver.StreamRequestHandler): def __init__(self, handlers, *args, **kwargs): self.handlers = handlers socketserver.StreamRequestHandler.__init__(self, *args, **kwargs) def handle(self): proto = ReceivableProtocol(self.connection.recv, self.wfile.write) command, args = proto.read_cmd() logger.info("Handling %s request, args=%s", command, args) cls = self.handlers.get(command, None) if not callable(cls): raise GitProtocolError("Invalid service %s" % command) h = cls(self.server.backend, args, proto) h.handle() class TCPGitServer(socketserver.TCPServer): allow_reuse_address = True serve = socketserver.TCPServer.serve_forever def _make_handler(self, *args, **kwargs): return TCPGitRequestHandler(self.handlers, *args, **kwargs) def __init__(self, backend, listen_addr, port=TCP_GIT_PORT, handlers=None): self.handlers = dict(DEFAULT_HANDLERS) if handlers is not None: self.handlers.update(handlers) self.backend = backend logger.info("Listening for TCP connections on %s:%d", listen_addr, port) socketserver.TCPServer.__init__(self, (listen_addr, port), self._make_handler) def verify_request(self, request, client_address): logger.info("Handling request from %s", client_address) return True def handle_error(self, request, client_address): logger.exception( - "Exception happened during processing of request " "from %s", client_address + "Exception happened during processing of request " "from %s", + client_address, ) def main(argv=sys.argv): """Entry point for starting a TCP git server.""" import optparse parser = optparse.OptionParser() parser.add_option( "-l", "--listen_address", dest="listen_address", default="localhost", help="Binding IP address.", ) parser.add_option( "-p", "--port", dest="port", type=int, default=TCP_GIT_PORT, help="Binding TCP port.", ) options, args = parser.parse_args(argv) log_utils.default_logging_config() if len(args) > 1: gitdir = args[1] else: gitdir = "." # TODO(jelmer): Support git-daemon-export-ok and --export-all. backend = FileSystemBackend(gitdir) server = TCPGitServer(backend, options.listen_address, options.port) server.serve_forever() def serve_command( handler_cls, argv=sys.argv, backend=None, inf=sys.stdin, outf=sys.stdout ): """Serve a single command. This is mostly useful for the implementation of commands used by e.g. git+ssh. Args: handler_cls: `Handler` class to use for the request argv: execv-style command-line arguments. Defaults to sys.argv. backend: `Backend` to use inf: File-like object to read from, defaults to standard input. outf: File-like object to write to, defaults to standard output. Returns: Exit code for use with sys.exit. 0 on success, 1 on failure. """ if backend is None: backend = FileSystemBackend() def send_fn(data): outf.write(data) outf.flush() proto = Protocol(inf.read, send_fn) handler = handler_cls(backend, argv[1:], proto) # FIXME: Catch exceptions and write a single-line summary to outf. handler.handle() return 0 def generate_info_refs(repo): """Generate an info refs file.""" refs = repo.get_refs() return write_info_refs(refs, repo.object_store) def generate_objects_info_packs(repo): """Generate an index for packs.""" for pack in repo.object_store.packs: yield (b"P " + os.fsencode(pack.data.filename) + b"\n")
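
main() wires a backend, the default handlers and TCPGitServer together from the command line; doing the same programmatically is a few lines (the export root below is a hypothetical path):

from dulwich.server import FileSystemBackend, TCPGitServer

backend = FileSystemBackend("/srv/git")      # hypothetical directory of repositories
server = TCPGitServer(backend, "localhost")  # port defaults to TCP_GIT_PORT (9418)
# server.serve_forever()  # blocks; clients can then run `git clone git://localhost/<path>`
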
def update_server_info(repo): """Generate server info for dumb file access. This generates info/refs and objects/info/packs, similar to "git update-server-info". """ repo._put_named_file( os.path.join("info", "refs"), b"".join(generate_info_refs(repo)) ) repo._put_named_file( os.path.join("objects", "info", "packs"), b"".join(generate_objects_info_packs(repo)), ) if __name__ == "__main__": main() diff --git a/dulwich/tests/compat/server_utils.py b/dulwich/tests/compat/server_utils.py index fced52bb..7837e1b9 100644 --- a/dulwich/tests/compat/server_utils.py +++ b/dulwich/tests/compat/server_utils.py @@ -1,364 +1,368 @@ # server_utils.py -- Git server compatibility utilities # Copyright (C) 2010 Google, Inc. # # Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU # General Public License as published by the Free Software Foundation; version 2.0 # or (at your option) any later version. You can redistribute it and/or # modify it under the terms of either of these two licenses. # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # You should have received a copy of the licenses; if not, see # <http://www.gnu.org/licenses/> for a copy of the GNU General Public License # and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache # License, Version 2.0. # """Utilities for testing git server compatibility.""" import errno import os import shutil import socket import tempfile from dulwich.repo import Repo from dulwich.objects import hex_to_sha from dulwich.protocol import ( CAPABILITY_SIDE_BAND_64K, ) from dulwich.server import ( ReceivePackHandler, ) from dulwich.tests.utils import ( tear_down_repo, ) from dulwich.tests.compat.utils import ( run_git_or_fail, ) from dulwich.tests.compat.utils import require_git_version class _StubRepo(object): """A stub repo that just contains a path to tear down.""" def __init__(self, name): temp_dir = tempfile.mkdtemp() self.path = os.path.join(temp_dir, name) os.mkdir(self.path) def close(self): pass def _get_shallow(repo): shallow_file = repo.get_named_file("shallow") if not shallow_file: return [] shallows = [] with shallow_file: for line in shallow_file: sha = line.strip() if not sha: continue hex_to_sha(sha) shallows.append(sha) return shallows class ServerTests(object): """Base tests for testing servers. Does not inherit from TestCase so tests are not automatically run.
""" min_single_branch_version = ( 1, 7, 10, ) def import_repos(self): self._old_repo = self.import_repo("server_old.export") self._new_repo = self.import_repo("server_new.export") def url(self, port): return "%s://localhost:%s/" % (self.protocol, port) def branch_args(self, branches=None): if branches is None: branches = ["master", "branch"] return ["%s:%s" % (b, b) for b in branches] def test_push_to_dulwich(self): self.import_repos() self.assertReposNotEqual(self._old_repo, self._new_repo) port = self._start_server(self._old_repo) run_git_or_fail( - ["push", self.url(port)] + self.branch_args(), cwd=self._new_repo.path + ["push", self.url(port)] + self.branch_args(), + cwd=self._new_repo.path, ) self.assertReposEqual(self._old_repo, self._new_repo) def test_push_to_dulwich_no_op(self): self._old_repo = self.import_repo("server_old.export") self._new_repo = self.import_repo("server_old.export") self.assertReposEqual(self._old_repo, self._new_repo) port = self._start_server(self._old_repo) run_git_or_fail( - ["push", self.url(port)] + self.branch_args(), cwd=self._new_repo.path + ["push", self.url(port)] + self.branch_args(), + cwd=self._new_repo.path, ) self.assertReposEqual(self._old_repo, self._new_repo) def test_push_to_dulwich_remove_branch(self): self._old_repo = self.import_repo("server_old.export") self._new_repo = self.import_repo("server_old.export") self.assertReposEqual(self._old_repo, self._new_repo) port = self._start_server(self._old_repo) run_git_or_fail(["push", self.url(port), ":master"], cwd=self._new_repo.path) self.assertEqual(list(self._old_repo.get_refs().keys()), [b"refs/heads/branch"]) def test_fetch_from_dulwich(self): self.import_repos() self.assertReposNotEqual(self._old_repo, self._new_repo) port = self._start_server(self._new_repo) run_git_or_fail( - ["fetch", self.url(port)] + self.branch_args(), cwd=self._old_repo.path + ["fetch", self.url(port)] + self.branch_args(), + cwd=self._old_repo.path, ) # flush the pack cache so any new packs are picked up self._old_repo.object_store._pack_cache_time = 0 self.assertReposEqual(self._old_repo, self._new_repo) def test_fetch_from_dulwich_no_op(self): self._old_repo = self.import_repo("server_old.export") self._new_repo = self.import_repo("server_old.export") self.assertReposEqual(self._old_repo, self._new_repo) port = self._start_server(self._new_repo) run_git_or_fail( - ["fetch", self.url(port)] + self.branch_args(), cwd=self._old_repo.path + ["fetch", self.url(port)] + self.branch_args(), + cwd=self._old_repo.path, ) # flush the pack cache so any new packs are picked up self._old_repo.object_store._pack_cache_time = 0 self.assertReposEqual(self._old_repo, self._new_repo) def test_clone_from_dulwich_empty(self): old_repo_dir = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, old_repo_dir) self._old_repo = Repo.init_bare(old_repo_dir) port = self._start_server(self._old_repo) new_repo_base_dir = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, new_repo_base_dir) new_repo_dir = os.path.join(new_repo_base_dir, "empty_new") run_git_or_fail(["clone", self.url(port), new_repo_dir], cwd=new_repo_base_dir) new_repo = Repo(new_repo_dir) self.assertReposEqual(self._old_repo, new_repo) def test_lsremote_from_dulwich(self): self._repo = self.import_repo("server_old.export") port = self._start_server(self._repo) o = run_git_or_fail(["ls-remote", self.url(port)]) self.assertEqual(len(o.split(b"\n")), 4) def test_new_shallow_clone_from_dulwich(self): require_git_version(self.min_single_branch_version) self._source_repo = 
self.import_repo("server_new.export") self._stub_repo = _StubRepo("shallow") self.addCleanup(tear_down_repo, self._stub_repo) port = self._start_server(self._source_repo) # Fetch at depth 1 run_git_or_fail( [ "clone", "--mirror", "--depth=1", "--no-single-branch", self.url(port), self._stub_repo.path, ] ) clone = self._stub_repo = Repo(self._stub_repo.path) expected_shallow = [ b"35e0b59e187dd72a0af294aedffc213eaa4d03ff", b"514dc6d3fbfe77361bcaef320c4d21b72bc10be9", ] self.assertEqual(expected_shallow, _get_shallow(clone)) self.assertReposNotEqual(clone, self._source_repo) def test_shallow_clone_from_git_is_identical(self): require_git_version(self.min_single_branch_version) self._source_repo = self.import_repo("server_new.export") self._stub_repo_git = _StubRepo("shallow-git") self.addCleanup(tear_down_repo, self._stub_repo_git) self._stub_repo_dw = _StubRepo("shallow-dw") self.addCleanup(tear_down_repo, self._stub_repo_dw) # shallow clone using stock git, then using dulwich run_git_or_fail( [ "clone", "--mirror", "--depth=1", "--no-single-branch", "file://" + self._source_repo.path, self._stub_repo_git.path, ] ) port = self._start_server(self._source_repo) run_git_or_fail( [ "clone", "--mirror", "--depth=1", "--no-single-branch", self.url(port), self._stub_repo_dw.path, ] ) # compare the two clones; they should be equal self.assertReposEqual( Repo(self._stub_repo_git.path), Repo(self._stub_repo_dw.path) ) def test_fetch_same_depth_into_shallow_clone_from_dulwich(self): require_git_version(self.min_single_branch_version) self._source_repo = self.import_repo("server_new.export") self._stub_repo = _StubRepo("shallow") self.addCleanup(tear_down_repo, self._stub_repo) port = self._start_server(self._source_repo) # Fetch at depth 2 run_git_or_fail( [ "clone", "--mirror", "--depth=2", "--no-single-branch", self.url(port), self._stub_repo.path, ] ) clone = self._stub_repo = Repo(self._stub_repo.path) # Fetching at the same depth is a no-op. run_git_or_fail( ["fetch", "--depth=2", self.url(port)] + self.branch_args(), cwd=self._stub_repo.path, ) expected_shallow = [ b"94de09a530df27ac3bb613aaecdd539e0a0655e1", b"da5cd81e1883c62a25bb37c4d1f8ad965b29bf8d", ] self.assertEqual(expected_shallow, _get_shallow(clone)) self.assertReposNotEqual(clone, self._source_repo) def test_fetch_full_depth_into_shallow_clone_from_dulwich(self): require_git_version(self.min_single_branch_version) self._source_repo = self.import_repo("server_new.export") self._stub_repo = _StubRepo("shallow") self.addCleanup(tear_down_repo, self._stub_repo) port = self._start_server(self._source_repo) # Fetch at depth 2 run_git_or_fail( [ "clone", "--mirror", "--depth=2", "--no-single-branch", self.url(port), self._stub_repo.path, ] ) clone = self._stub_repo = Repo(self._stub_repo.path) # Fetching at the same depth is a no-op. run_git_or_fail( ["fetch", "--depth=2", self.url(port)] + self.branch_args(), cwd=self._stub_repo.path, ) # The whole repo only has depth 4, so it should equal server_new. run_git_or_fail( ["fetch", "--depth=4", self.url(port)] + self.branch_args(), cwd=self._stub_repo.path, ) self.assertEqual([], _get_shallow(clone)) self.assertReposEqual(clone, self._source_repo) def test_fetch_from_dulwich_issue_88_standard(self): # Basically an integration test to see that the ACK/NAK # generation works on repos with common head. 
self._source_repo = self.import_repo("issue88_expect_ack_nak_server.export") self._client_repo = self.import_repo("issue88_expect_ack_nak_client.export") port = self._start_server(self._source_repo) run_git_or_fail(["fetch", self.url(port), "master"], cwd=self._client_repo.path) self.assertObjectStoreEqual( self._source_repo.object_store, self._client_repo.object_store ) def test_fetch_from_dulwich_issue_88_alternative(self): # likewise, but the case where the two repos have no common parent self._source_repo = self.import_repo("issue88_expect_ack_nak_other.export") self._client_repo = self.import_repo("issue88_expect_ack_nak_client.export") port = self._start_server(self._source_repo) self.assertRaises( KeyError, self._client_repo.get_object, b"02a14da1fc1fc13389bbf32f0af7d8899f2b2323", ) run_git_or_fail(["fetch", self.url(port), "master"], cwd=self._client_repo.path) self.assertEqual( b"commit", self._client_repo.get_object( b"02a14da1fc1fc13389bbf32f0af7d8899f2b2323" ).type_name, ) def test_push_to_dulwich_issue_88_standard(self): # Same thing, but we reverse the role of the server/client # and do a push instead. self._source_repo = self.import_repo("issue88_expect_ack_nak_client.export") self._client_repo = self.import_repo("issue88_expect_ack_nak_server.export") port = self._start_server(self._source_repo) run_git_or_fail(["push", self.url(port), "master"], cwd=self._client_repo.path) self.assertReposEqual(self._source_repo, self._client_repo) # TODO(dborowitz): Come up with a better way of testing various permutations of # capabilities. The only reason it is the way it is now is that side-band-64k # was only recently introduced into git-receive-pack. class NoSideBand64kReceivePackHandler(ReceivePackHandler): """ReceivePackHandler that does not support side-band-64k.""" @classmethod def capabilities(cls): return [ c for c in ReceivePackHandler.capabilities() if c != CAPABILITY_SIDE_BAND_64K ] def ignore_error(error): """Check whether this error is safe to ignore.""" (e_type, e_value, e_tb) = error return issubclass(e_type, socket.error) and e_value[0] in ( errno.ECONNRESET, errno.EPIPE, ) diff --git a/dulwich/tests/compat/test_client.py b/dulwich/tests/compat/test_client.py index e89b2ce4..1cb3cc99 100644 --- a/dulwich/tests/compat/test_client.py +++ b/dulwich/tests/compat/test_client.py @@ -1,644 +1,661 @@ # test_client.py -- Compatibilty tests for git client. # Copyright (C) 2010 Google, Inc. # # Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU # General Public License as public by the Free Software Foundation; version 2.0 # or (at your option) any later version. You can redistribute it and/or # modify it under the terms of either of these two licenses. # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # You should have received a copy of the licenses; if not, see # for a copy of the GNU General Public License # and for a copy of the Apache # License, Version 2.0. 
# """Compatibilty tests between the Dulwich client and the cgit server.""" import copy from io import BytesIO import os import select import signal import stat import subprocess import sys import tarfile import tempfile import threading from urllib.parse import unquote import http.server from dulwich import ( client, file, index, protocol, objects, repo, ) from dulwich.tests import ( SkipTest, expectedFailure, ) from dulwich.tests.compat.utils import ( CompatTestCase, check_for_daemon, import_repo_to_dir, rmtree_ro, run_git_or_fail, _DEFAULT_GIT, ) if sys.platform == "win32": import ctypes class DulwichClientTestBase(object): """Tests for client/server compatibility.""" def setUp(self): self.gitroot = os.path.dirname( import_repo_to_dir("server_new.export").rstrip(os.sep) ) self.dest = os.path.join(self.gitroot, "dest") file.ensure_dir_exists(self.dest) run_git_or_fail(["init", "--quiet", "--bare"], cwd=self.dest) def tearDown(self): rmtree_ro(self.gitroot) def assertDestEqualsSrc(self): repo_dir = os.path.join(self.gitroot, "server_new.export") dest_repo_dir = os.path.join(self.gitroot, "dest") with repo.Repo(repo_dir) as src: with repo.Repo(dest_repo_dir) as dest: self.assertReposEqual(src, dest) def _client(self): raise NotImplementedError() def _build_path(self): raise NotImplementedError() def _do_send_pack(self): c = self._client() srcpath = os.path.join(self.gitroot, "server_new.export") with repo.Repo(srcpath) as src: sendrefs = dict(src.get_refs()) del sendrefs[b"HEAD"] c.send_pack( - self._build_path("/dest"), lambda _: sendrefs, src.generate_pack_data + self._build_path("/dest"), + lambda _: sendrefs, + src.generate_pack_data, ) def test_send_pack(self): self._do_send_pack() self.assertDestEqualsSrc() def test_send_pack_nothing_to_send(self): self._do_send_pack() self.assertDestEqualsSrc() # nothing to send, but shouldn't raise either. 
self._do_send_pack() @staticmethod def _add_file(repo, tree_id, filename, contents): tree = repo[tree_id] blob = objects.Blob() blob.data = contents.encode("utf-8") repo.object_store.add_object(blob) tree.add(filename.encode("utf-8"), stat.S_IFREG | 0o644, blob.id) repo.object_store.add_object(tree) return tree.id def test_send_pack_from_shallow_clone(self): c = self._client() server_new_path = os.path.join(self.gitroot, "server_new.export") run_git_or_fail(["config", "http.uploadpack", "true"], cwd=server_new_path) run_git_or_fail(["config", "http.receivepack", "true"], cwd=server_new_path) remote_path = self._build_path("/server_new.export") with repo.Repo(self.dest) as local: result = c.fetch(remote_path, local, depth=1) for r in result.refs.items(): local.refs.set_if_equals(r[0], None, r[1]) tree_id = local[local.head()].tree for filename, contents in [ ("bar", "bar contents"), ("zop", "zop contents"), ]: tree_id = self._add_file(local, tree_id, filename, contents) commit_id = local.do_commit( message=b"add " + filename.encode("utf-8"), committer=b"Joe Example <joe@example.com>", tree=tree_id, ) sendrefs = dict(local.get_refs()) del sendrefs[b"HEAD"] c.send_pack(remote_path, lambda _: sendrefs, local.generate_pack_data) with repo.Repo(server_new_path) as remote: self.assertEqual(remote.head(), commit_id) def test_send_without_report_status(self): c = self._client() c._send_capabilities.remove(b"report-status") srcpath = os.path.join(self.gitroot, "server_new.export") with repo.Repo(srcpath) as src: sendrefs = dict(src.get_refs()) del sendrefs[b"HEAD"] c.send_pack( - self._build_path("/dest"), lambda _: sendrefs, src.generate_pack_data + self._build_path("/dest"), + lambda _: sendrefs, + src.generate_pack_data, ) self.assertDestEqualsSrc() def make_dummy_commit(self, dest): b = objects.Blob.from_string(b"hi") dest.object_store.add_object(b) t = index.commit_tree(dest.object_store, [(b"hi", b.id, 0o100644)]) c = objects.Commit() c.author = c.committer = b"Foo Bar <foo@example.com>" c.author_time = c.commit_time = 0 c.author_timezone = c.commit_timezone = 0 c.message = b"hi" c.tree = t dest.object_store.add_object(c) return c.id def disable_ff_and_make_dummy_commit(self): # disable non-fast-forward pushes to the server dest = repo.Repo(os.path.join(self.gitroot, "dest")) run_git_or_fail( ["config", "receive.denyNonFastForwards", "true"], cwd=dest.path ) commit_id = self.make_dummy_commit(dest) return dest, commit_id def compute_send(self, src): sendrefs = dict(src.get_refs()) del sendrefs[b"HEAD"] return sendrefs, src.generate_pack_data def test_send_pack_one_error(self): dest, dummy_commit = self.disable_ff_and_make_dummy_commit() dest.refs[b"refs/heads/master"] = dummy_commit repo_dir = os.path.join(self.gitroot, "server_new.export") with repo.Repo(repo_dir) as src: sendrefs, gen_pack = self.compute_send(src) c = self._client() result = c.send_pack( self._build_path("/dest"), lambda _: sendrefs, gen_pack ) self.assertEqual( - {b"refs/heads/branch": None, b"refs/heads/master": "non-fast-forward"}, + { + b"refs/heads/branch": None, + b"refs/heads/master": "non-fast-forward", + }, result.ref_status, ) def test_send_pack_multiple_errors(self): dest, dummy = self.disable_ff_and_make_dummy_commit() # set up for two non-ff errors branch, master = b"refs/heads/branch", b"refs/heads/master" dest.refs[branch] = dest.refs[master] = dummy repo_dir = os.path.join(self.gitroot, "server_new.export") with repo.Repo(repo_dir) as src: sendrefs, gen_pack = self.compute_send(src) c = self._client() result = c.send_pack(
self._build_path("/dest"), lambda _: sendrefs, gen_pack ) self.assertEqual( {branch: "non-fast-forward", master: "non-fast-forward"}, result.ref_status, ) def test_archive(self): c = self._client() f = BytesIO() c.archive(self._build_path("/server_new.export"), b"HEAD", f.write) f.seek(0) tf = tarfile.open(fileobj=f) self.assertEqual(["baz", "foo"], tf.getnames()) def test_fetch_pack(self): c = self._client() with repo.Repo(os.path.join(self.gitroot, "dest")) as dest: result = c.fetch(self._build_path("/server_new.export"), dest) for r in result.refs.items(): dest.refs.set_if_equals(r[0], None, r[1]) self.assertDestEqualsSrc() def test_fetch_pack_depth(self): c = self._client() with repo.Repo(os.path.join(self.gitroot, "dest")) as dest: result = c.fetch(self._build_path("/server_new.export"), dest, depth=1) for r in result.refs.items(): dest.refs.set_if_equals(r[0], None, r[1]) self.assertEqual( dest.get_shallow(), set( [ b"35e0b59e187dd72a0af294aedffc213eaa4d03ff", b"514dc6d3fbfe77361bcaef320c4d21b72bc10be9", ] ), ) def test_repeat(self): c = self._client() with repo.Repo(os.path.join(self.gitroot, "dest")) as dest: result = c.fetch(self._build_path("/server_new.export"), dest) for r in result.refs.items(): dest.refs.set_if_equals(r[0], None, r[1]) self.assertDestEqualsSrc() result = c.fetch(self._build_path("/server_new.export"), dest) for r in result.refs.items(): dest.refs.set_if_equals(r[0], None, r[1]) self.assertDestEqualsSrc() def test_fetch_empty_pack(self): c = self._client() with repo.Repo(os.path.join(self.gitroot, "dest")) as dest: result = c.fetch(self._build_path("/server_new.export"), dest) for r in result.refs.items(): dest.refs.set_if_equals(r[0], None, r[1]) self.assertDestEqualsSrc() def dw(refs): return list(refs.values()) result = c.fetch( - self._build_path("/server_new.export"), dest, determine_wants=dw + self._build_path("/server_new.export"), + dest, + determine_wants=dw, ) for r in result.refs.items(): dest.refs.set_if_equals(r[0], None, r[1]) self.assertDestEqualsSrc() def test_incremental_fetch_pack(self): self.test_fetch_pack() dest, dummy = self.disable_ff_and_make_dummy_commit() dest.refs[b"refs/heads/master"] = dummy c = self._client() repo_dir = os.path.join(self.gitroot, "server_new.export") with repo.Repo(repo_dir) as dest: result = c.fetch(self._build_path("/dest"), dest) for r in result.refs.items(): dest.refs.set_if_equals(r[0], None, r[1]) self.assertDestEqualsSrc() def test_fetch_pack_no_side_band_64k(self): c = self._client() c._fetch_capabilities.remove(b"side-band-64k") with repo.Repo(os.path.join(self.gitroot, "dest")) as dest: result = c.fetch(self._build_path("/server_new.export"), dest) for r in result.refs.items(): dest.refs.set_if_equals(r[0], None, r[1]) self.assertDestEqualsSrc() def test_fetch_pack_zero_sha(self): # zero sha1s are already present on the client, and should # be ignored c = self._client() with repo.Repo(os.path.join(self.gitroot, "dest")) as dest: result = c.fetch( self._build_path("/server_new.export"), dest, lambda refs: [protocol.ZERO_SHA], ) for r in result.refs.items(): dest.refs.set_if_equals(r[0], None, r[1]) def test_send_remove_branch(self): with repo.Repo(os.path.join(self.gitroot, "dest")) as dest: dummy_commit = self.make_dummy_commit(dest) dest.refs[b"refs/heads/master"] = dummy_commit dest.refs[b"refs/heads/abranch"] = dummy_commit sendrefs = dict(dest.refs) sendrefs[b"refs/heads/abranch"] = b"00" * 20 del sendrefs[b"HEAD"] def gen_pack(have, want, ofs_delta=False): return 0, [] c = self._client() 
self.assertEqual(dest.refs[b"refs/heads/abranch"], dummy_commit) c.send_pack(self._build_path("/dest"), lambda _: sendrefs, gen_pack) self.assertFalse(b"refs/heads/abranch" in dest.refs) def test_send_new_branch_empty_pack(self): with repo.Repo(os.path.join(self.gitroot, "dest")) as dest: dummy_commit = self.make_dummy_commit(dest) dest.refs[b"refs/heads/master"] = dummy_commit dest.refs[b"refs/heads/abranch"] = dummy_commit sendrefs = {b"refs/heads/bbranch": dummy_commit} def gen_pack(have, want, ofs_delta=False): return 0, [] c = self._client() self.assertEqual(dest.refs[b"refs/heads/abranch"], dummy_commit) c.send_pack(self._build_path("/dest"), lambda _: sendrefs, gen_pack) self.assertEqual(dummy_commit, dest.refs[b"refs/heads/abranch"]) def test_get_refs(self): c = self._client() refs = c.get_refs(self._build_path("/server_new.export")) repo_dir = os.path.join(self.gitroot, "server_new.export") with repo.Repo(repo_dir) as dest: self.assertDictEqual(dest.refs.as_dict(), refs) class DulwichTCPClientTest(CompatTestCase, DulwichClientTestBase): def setUp(self): CompatTestCase.setUp(self) DulwichClientTestBase.setUp(self) if check_for_daemon(limit=1): raise SkipTest( "git-daemon was already running on port %s" % protocol.TCP_GIT_PORT ) fd, self.pidfile = tempfile.mkstemp( prefix="dulwich-test-git-client", suffix=".pid" ) os.fdopen(fd).close() args = [ _DEFAULT_GIT, "daemon", "--verbose", "--export-all", "--pid-file=%s" % self.pidfile, "--base-path=%s" % self.gitroot, "--enable=receive-pack", "--enable=upload-archive", "--listen=localhost", "--reuseaddr", self.gitroot, ] self.process = subprocess.Popen( - args, cwd=self.gitroot, stdout=subprocess.PIPE, stderr=subprocess.PIPE + args, + cwd=self.gitroot, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, ) if not check_for_daemon(): raise SkipTest("git-daemon failed to start") def tearDown(self): with open(self.pidfile) as f: pid = int(f.read().strip()) if sys.platform == "win32": PROCESS_TERMINATE = 1 handle = ctypes.windll.kernel32.OpenProcess(PROCESS_TERMINATE, False, pid) ctypes.windll.kernel32.TerminateProcess(handle, -1) ctypes.windll.kernel32.CloseHandle(handle) else: try: os.kill(pid, signal.SIGKILL) os.unlink(self.pidfile) except (OSError, IOError): pass self.process.wait() self.process.stdout.close() self.process.stderr.close() DulwichClientTestBase.tearDown(self) CompatTestCase.tearDown(self) def _client(self): return client.TCPGitClient("localhost") def _build_path(self, path): return path if sys.platform == "win32": @expectedFailure def test_fetch_pack_no_side_band_64k(self): DulwichClientTestBase.test_fetch_pack_no_side_band_64k(self) class TestSSHVendor(object): @staticmethod def run_command( - host, command, username=None, port=None, password=None, key_filename=None + host, + command, + username=None, + port=None, + password=None, + key_filename=None, ): cmd, path = command.split(" ") cmd = cmd.split("-", 1) path = path.replace("'", "") p = subprocess.Popen( cmd + [path], bufsize=0, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, ) return client.SubprocessWrapper(p) class DulwichMockSSHClientTest(CompatTestCase, DulwichClientTestBase): def setUp(self): CompatTestCase.setUp(self) DulwichClientTestBase.setUp(self) self.real_vendor = client.get_ssh_vendor client.get_ssh_vendor = TestSSHVendor def tearDown(self): DulwichClientTestBase.tearDown(self) CompatTestCase.tearDown(self) client.get_ssh_vendor = self.real_vendor def _client(self): return client.SSHGitClient("localhost") def _build_path(self, path): 
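# The stub vendor runs git locally instead of over SSH, so the "remote" # path has to be an absolute path under the on-disk gitroot.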
return self.gitroot + path class DulwichSubprocessClientTest(CompatTestCase, DulwichClientTestBase): def setUp(self): CompatTestCase.setUp(self) DulwichClientTestBase.setUp(self) def tearDown(self): DulwichClientTestBase.tearDown(self) CompatTestCase.tearDown(self) def _client(self): return client.SubprocessGitClient() def _build_path(self, path): return self.gitroot + path class GitHTTPRequestHandler(http.server.SimpleHTTPRequestHandler): """HTTP Request handler that calls out to 'git http-backend'.""" # Make rfile unbuffered -- we need to read one line and then pass # the rest to a subprocess, so we can't use buffered input. rbufsize = 0 def do_POST(self): self.run_backend() def do_GET(self): self.run_backend() def send_head(self): return self.run_backend() def log_request(self, code="-", size="-"): # Let's be quiet, the test suite is noisy enough already pass - def run_backend(self): + def run_backend(self): # noqa: C901 """Call out to git http-backend.""" # Based on CGIHTTPServer.CGIHTTPRequestHandler.run_cgi: # Copyright (c) 2001-2010 Python Software Foundation; # All Rights Reserved # Licensed under the Python Software Foundation License. rest = self.path # find an explicit query string, if present. i = rest.rfind("?") if i >= 0: rest, query = rest[:i], rest[i + 1 :] else: query = "" env = copy.deepcopy(os.environ) env["SERVER_SOFTWARE"] = self.version_string() env["SERVER_NAME"] = self.server.server_name env["GATEWAY_INTERFACE"] = "CGI/1.1" env["SERVER_PROTOCOL"] = self.protocol_version env["SERVER_PORT"] = str(self.server.server_port) env["GIT_PROJECT_ROOT"] = self.server.root_path env["GIT_HTTP_EXPORT_ALL"] = "1" env["REQUEST_METHOD"] = self.command uqrest = unquote(rest) env["PATH_INFO"] = uqrest env["SCRIPT_NAME"] = "/" if query: env["QUERY_STRING"] = query host = self.address_string() if host != self.client_address[0]: env["REMOTE_HOST"] = host env["REMOTE_ADDR"] = self.client_address[0] authorization = self.headers.get("authorization") if authorization: authorization = authorization.split() if len(authorization) == 2: import base64 import binascii env["AUTH_TYPE"] = authorization[0] if authorization[0].lower() == "basic": try: authorization = base64.decodestring(authorization[1]) except binascii.Error: pass else: authorization = authorization.split(":") if len(authorization) == 2: env["REMOTE_USER"] = authorization[0] # XXX REMOTE_IDENT content_type = self.headers.get("content-type") if content_type: env["CONTENT_TYPE"] = content_type length = self.headers.get("content-length") if length: env["CONTENT_LENGTH"] = length referer = self.headers.get("referer") if referer: env["HTTP_REFERER"] = referer accept = [] for line in self.headers.getallmatchingheaders("accept"): if line[:1] in "\t\n\r ": accept.append(line.strip()) else: accept = accept + line[7:].split(",") env["HTTP_ACCEPT"] = ",".join(accept) ua = self.headers.get("user-agent") if ua: env["HTTP_USER_AGENT"] = ua co = self.headers.get("cookie") if co: env["HTTP_COOKIE"] = co # XXX Other HTTP_* headers # Since we're setting the env in the parent, provide empty # values to override previously set values for k in ( "QUERY_STRING", "REMOTE_HOST", "CONTENT_LENGTH", "HTTP_USER_AGENT", "HTTP_COOKIE", "HTTP_REFERER", ): env.setdefault(k, "") self.wfile.write(b"HTTP/1.1 200 Script output follows\r\n") self.wfile.write(("Server: %s\r\n" % self.server.server_name).encode("ascii")) self.wfile.write(("Date: %s\r\n" % self.date_time_string()).encode("ascii")) decoded_query = query.replace("+", " ") try: nbytes = int(length) except 
(TypeError, ValueError): nbytes = 0 if self.command.lower() == "post" and nbytes > 0: data = self.rfile.read(nbytes) else: data = None env["CONTENT_LENGTH"] = "0" # throw away additional data [see bug #427345] while select.select([self.rfile._sock], [], [], 0)[0]: if not self.rfile._sock.recv(1): break args = ["http-backend"] if "=" not in decoded_query: args.append(decoded_query) stdout = run_git_or_fail(args, input=data, env=env, stderr=subprocess.PIPE) self.wfile.write(stdout) class HTTPGitServer(http.server.HTTPServer): allow_reuse_address = True def __init__(self, server_address, root_path): http.server.HTTPServer.__init__(self, server_address, GitHTTPRequestHandler) self.root_path = root_path self.server_name = "localhost" def get_url(self): return "http://%s:%s/" % (self.server_name, self.server_port) class DulwichHttpClientTest(CompatTestCase, DulwichClientTestBase): min_git_version = (1, 7, 0, 2) def setUp(self): CompatTestCase.setUp(self) DulwichClientTestBase.setUp(self) self._httpd = HTTPGitServer(("localhost", 0), self.gitroot) self.addCleanup(self._httpd.shutdown) threading.Thread(target=self._httpd.serve_forever).start() run_git_or_fail(["config", "http.uploadpack", "true"], cwd=self.dest) run_git_or_fail(["config", "http.receivepack", "true"], cwd=self.dest) def tearDown(self): DulwichClientTestBase.tearDown(self) CompatTestCase.tearDown(self) self._httpd.shutdown() self._httpd.socket.close() def _client(self): return client.HttpGitClient(self._httpd.get_url()) def _build_path(self, path): return path def test_archive(self): raise SkipTest("exporting archives not supported over http") diff --git a/dulwich/tests/compat/test_pack.py b/dulwich/tests/compat/test_pack.py index cad8667a..8ae614f5 100644 --- a/dulwich/tests/compat/test_pack.py +++ b/dulwich/tests/compat/test_pack.py @@ -1,163 +1,172 @@ # test_pack.py -- Compatibility tests for git packs. # Copyright (C) 2010 Google, Inc. # # Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU # General Public License as published by the Free Software Foundation; version 2.0 # or (at your option) any later version. You can redistribute it and/or # modify it under the terms of either of these two licenses. # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # You should have received a copy of the licenses; if not, see # <http://www.gnu.org/licenses/> for a copy of the GNU General Public License # and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache # License, Version 2.0.
# """Compatibility tests for git packs.""" import binascii import os import re import shutil import tempfile from dulwich.pack import ( write_pack, ) from dulwich.objects import ( Blob, ) from dulwich.tests import ( SkipTest, ) from dulwich.tests.test_pack import ( a_sha, pack1_sha, PackTests, ) from dulwich.tests.compat.utils import ( require_git_version, run_git_or_fail, ) _NON_DELTA_RE = re.compile(b"non delta: (?P\\d+) objects") def _git_verify_pack_object_list(output): pack_shas = set() for line in output.splitlines(): sha = line[:40] try: binascii.unhexlify(sha) except (TypeError, binascii.Error): continue # non-sha line pack_shas.add(sha) return pack_shas class TestPack(PackTests): """Compatibility tests for reading and writing pack files.""" def setUp(self): require_git_version((1, 5, 0)) super(TestPack, self).setUp() self._tempdir = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, self._tempdir) def test_copy(self): with self.get_pack(pack1_sha) as origpack: self.assertSucceeds(origpack.index.check) pack_path = os.path.join(self._tempdir, "Elch") write_pack(pack_path, origpack.pack_tuples()) output = run_git_or_fail(["verify-pack", "-v", pack_path]) orig_shas = set(o.id for o in origpack.iterobjects()) self.assertEqual(orig_shas, _git_verify_pack_object_list(output)) def test_deltas_work(self): with self.get_pack(pack1_sha) as orig_pack: orig_blob = orig_pack[a_sha] new_blob = Blob() new_blob.data = orig_blob.data + b"x" all_to_pack = list(orig_pack.pack_tuples()) + [(new_blob, None)] pack_path = os.path.join(self._tempdir, "pack_with_deltas") write_pack(pack_path, all_to_pack, deltify=True) output = run_git_or_fail(["verify-pack", "-v", pack_path]) self.assertEqual( - set(x[0].id for x in all_to_pack), _git_verify_pack_object_list(output) + set(x[0].id for x in all_to_pack), + _git_verify_pack_object_list(output), ) # We specifically made a new blob that should be a delta # against the blob a_sha, so make sure we really got only 3 # non-delta objects: got_non_delta = int(_NON_DELTA_RE.search(output).group("non_delta")) self.assertEqual( - 3, got_non_delta, "Expected 3 non-delta objects, got %d" % got_non_delta + 3, + got_non_delta, + "Expected 3 non-delta objects, got %d" % got_non_delta, ) def test_delta_medium_object(self): # This tests an object set that will have a copy operation # 2**20 in size. 
with self.get_pack(pack1_sha) as orig_pack: orig_blob = orig_pack[a_sha] new_blob = Blob() new_blob.data = orig_blob.data + (b"x" * 2 ** 20) new_blob_2 = Blob() new_blob_2.data = new_blob.data + b"y" all_to_pack = list(orig_pack.pack_tuples()) + [ (new_blob, None), (new_blob_2, None), ] pack_path = os.path.join(self._tempdir, "pack_with_deltas") write_pack(pack_path, all_to_pack, deltify=True) output = run_git_or_fail(["verify-pack", "-v", pack_path]) self.assertEqual( - set(x[0].id for x in all_to_pack), _git_verify_pack_object_list(output) + set(x[0].id for x in all_to_pack), + _git_verify_pack_object_list(output), ) # We specifically made a new blob that should be a delta # against the blob a_sha, so make sure we really got only 3 # non-delta objects: got_non_delta = int(_NON_DELTA_RE.search(output).group("non_delta")) self.assertEqual( - 3, got_non_delta, "Expected 3 non-delta objects, got %d" % got_non_delta + 3, + got_non_delta, + "Expected 3 non-delta objects, got %d" % got_non_delta, ) # We expect one object to have a delta chain length of two # (new_blob_2), so let's verify that actually happens: self.assertIn(b"chain length = 2", output) # This test is SUPER slow: over 80 seconds on a 2012-era # laptop. This is because SequenceMatcher is worst-case quadratic # on the input size. It's impractical to produce deltas for # objects this large, but it's still worth doing the right thing # when it happens. def test_delta_large_object(self): # This tests an object set that will have a copy operation # 2**25 in size. This is a copy large enough that it requires # two copy operations in git's binary delta format. raise SkipTest("skipping slow, large test") with self.get_pack(pack1_sha) as orig_pack: new_blob = Blob() new_blob.data = b"big blob" + (b"x" * 2 ** 25) new_blob_2 = Blob() new_blob_2.data = new_blob.data + b"y" all_to_pack = list(orig_pack.pack_tuples()) + [ (new_blob, None), (new_blob_2, None), ] pack_path = os.path.join(self._tempdir, "pack_with_deltas") write_pack(pack_path, all_to_pack, deltify=True) output = run_git_or_fail(["verify-pack", "-v", pack_path]) self.assertEqual( - set(x[0].id for x in all_to_pack), _git_verify_pack_object_list(output) + set(x[0].id for x in all_to_pack), + _git_verify_pack_object_list(output), ) # We specifically made a new blob that should be a delta # against the blob a_sha, so make sure we really got only 4 # non-delta objects: got_non_delta = int(_NON_DELTA_RE.search(output).group("non_delta")) self.assertEqual( - 4, got_non_delta, "Expected 4 non-delta objects, got %d" % got_non_delta + 4, + got_non_delta, + "Expected 4 non-delta objects, got %d" % got_non_delta, ) diff --git a/dulwich/tests/test_client.py b/dulwich/tests/test_client.py index b2d27a64..b844f5e8 100644 --- a/dulwich/tests/test_client.py +++ b/dulwich/tests/test_client.py @@ -1,1472 +1,1489 @@ # test_client.py -- Tests for the git protocol, client side # Copyright (C) 2009 Jelmer Vernooij # # Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU # General Public License as published by the Free Software Foundation; version 2.0 # or (at your option) any later version. You can redistribute it and/or # modify it under the terms of either of these two licenses. # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and # limitations under the License. # # You should have received a copy of the licenses; if not, see # <http://www.gnu.org/licenses/> for a copy of the GNU General Public License # and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache # License, Version 2.0. # from io import BytesIO import base64 import os import sys import shutil import tempfile import warnings from urllib.parse import ( quote as urlquote, urlparse, ) import dulwich from dulwich import ( client, ) from dulwich.client import ( InvalidWants, LocalGitClient, TraditionalGitClient, TCPGitClient, SSHGitClient, HttpGitClient, FetchPackResult, ReportStatusParser, SendPackError, StrangeHostname, SubprocessSSHVendor, PLinkSSHVendor, HangupException, GitProtocolError, check_wants, default_urllib3_manager, get_credentials_from_store, get_transport_and_path, get_transport_and_path_from_url, parse_rsync_url, _remote_error_from_stderr, ) from dulwich.config import ( ConfigDict, ) from dulwich.tests import ( TestCase, ) from dulwich.protocol import ( TCP_GIT_PORT, Protocol, ) from dulwich.pack import ( pack_objects_to_data, write_pack_data, write_pack_objects, ) from dulwich.objects import Commit, Tree from dulwich.repo import ( MemoryRepo, Repo, ) from dulwich.tests import skipIf from dulwich.tests.utils import ( open_repo, tear_down_repo, setup_warning_catcher, ) class DummyClient(TraditionalGitClient): def __init__(self, can_read, read, write): self.can_read = can_read self.read = read self.write = write TraditionalGitClient.__init__(self) def _connect(self, service, path): return Protocol(self.read, self.write), self.can_read, None class DummyPopen: def __init__(self, *args, **kwards): self.stdin = BytesIO(b"stdin") self.stdout = BytesIO(b"stdout") self.stderr = BytesIO(b"stderr") self.returncode = 0 self.args = args self.kwargs = kwards def communicate(self, *args, **kwards): return ("Running", "") def wait(self, *args, **kwards): return False # TODO(durin42): add unit-level tests of GitClient class GitClientTests(TestCase): def setUp(self): super(GitClientTests, self).setUp() self.rout = BytesIO() self.rin = BytesIO() self.client = DummyClient(lambda x: True, self.rin.read, self.rout.write) def test_caps(self): agent_cap = ("agent=dulwich/%d.%d.%d" % dulwich.__version__).encode("ascii") self.assertEqual( set( [ b"multi_ack", b"side-band-64k", b"ofs-delta", b"thin-pack", b"multi_ack_detailed", b"shallow", agent_cap, ] ), set(self.client._fetch_capabilities), ) self.assertEqual( set( [ b"delete-refs", b"ofs-delta", b"report-status", b"side-band-64k", agent_cap, ] ), set(self.client._send_capabilities), ) def test_archive_ack(self): self.rin.write(b"0009NACK\n" b"0000") self.rin.seek(0) self.client.archive(b"bla", b"HEAD", None, None) self.assertEqual(self.rout.getvalue(), b"0011argument HEAD0000") def test_fetch_empty(self): self.rin.write(b"0000") self.rin.seek(0) def check_heads(heads): self.assertEqual(heads, {}) return [] ret = self.client.fetch_pack(b"/", check_heads, None, None) self.assertEqual({}, ret.refs) self.assertEqual({}, ret.symrefs) def test_fetch_pack_ignores_magic_ref(self): self.rin.write( b"00000000000000000000000000000000000000000000 capabilities^{}" b"\x00 multi_ack " b"thin-pack side-band side-band-64k ofs-delta shallow no-progress " b"include-tag\n" b"0000" ) self.rin.seek(0) def check_heads(heads): self.assertEqual({}, heads) return [] ret = self.client.fetch_pack(b"bla", check_heads, None, None, None) self.assertEqual({}, ret.refs) self.assertEqual({}, ret.symrefs)
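# The only output should be a flush-pkt (b"0000"): the magic # capabilities^{} entry advertises no real refs, so there is nothing to want.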
self.assertEqual(self.rout.getvalue(), b"0000") def test_fetch_pack_none(self): self.rin.write( b"008855dcc6bf963f922e1ed5c4bbaaefcfacef57b1d7 HEAD\x00multi_ack " b"thin-pack side-band side-band-64k ofs-delta shallow no-progress " b"include-tag\n" b"0000" ) self.rin.seek(0) ret = self.client.fetch_pack(b"bla", lambda heads: [], None, None, None) self.assertEqual( {b"HEAD": b"55dcc6bf963f922e1ed5c4bbaaefcfacef57b1d7"}, ret.refs ) self.assertEqual({}, ret.symrefs) self.assertEqual(self.rout.getvalue(), b"0000") def test_send_pack_no_sideband64k_with_update_ref_error(self): # If the server does not report side-band-64k, the client shouldn't # try to parse side-band data pkts = [ b"55dcc6bf963f922e1ed5c4bbaaefcfacef57b1d7 capabilities^{}" b"\x00 report-status delete-refs ofs-delta\n", b"", b"unpack ok", b"ng refs/foo/bar pre-receive hook declined", b"", ] for pkt in pkts: if pkt == b"": self.rin.write(b"0000") else: self.rin.write(("%04x" % (len(pkt) + 4)).encode("ascii") + pkt) self.rin.seek(0) tree = Tree() commit = Commit() commit.tree = tree commit.parents = [] commit.author = commit.committer = b"test user" commit.commit_time = commit.author_time = 1174773719 commit.commit_timezone = commit.author_timezone = 0 commit.encoding = b"UTF-8" commit.message = b"test message" def update_refs(refs): return { b"refs/foo/bar": commit.id, } def generate_pack_data(have, want, ofs_delta=False): return pack_objects_to_data( [ (commit, None), (tree, ""), ] ) result = self.client.send_pack("blah", update_refs, generate_pack_data) self.assertEqual( {b"refs/foo/bar": "pre-receive hook declined"}, result.ref_status ) self.assertEqual({b"refs/foo/bar": commit.id}, result.refs) def test_send_pack_none(self): # Set ref to current value self.rin.write( b"0078310ca9477129b8586fa2afc779c1f57cf64bba6c " b"refs/heads/master\x00 report-status delete-refs " b"side-band-64k quiet ofs-delta\n" b"0000" ) self.rin.seek(0) def update_refs(refs): return {b"refs/heads/master": b"310ca9477129b8586fa2afc779c1f57cf64bba6c"} def generate_pack_data(have, want, ofs_delta=False): return 0, [] self.client.send_pack(b"/", update_refs, generate_pack_data) self.assertEqual(self.rout.getvalue(), b"0000") def test_send_pack_keep_and_delete(self): self.rin.write( b"0063310ca9477129b8586fa2afc779c1f57cf64bba6c " b"refs/heads/master\x00report-status delete-refs ofs-delta\n" b"003f310ca9477129b8586fa2afc779c1f57cf64bba6c refs/heads/keepme\n" b"0000000eunpack ok\n" b"0019ok refs/heads/master\n" b"0000" ) self.rin.seek(0) def update_refs(refs): return {b"refs/heads/master": b"0" * 40} def generate_pack_data(have, want, ofs_delta=False): return 0, [] self.client.send_pack(b"/", update_refs, generate_pack_data) self.assertEqual( self.rout.getvalue(), b"008b310ca9477129b8586fa2afc779c1f57cf64bba6c " b"0000000000000000000000000000000000000000 " b"refs/heads/master\x00delete-refs ofs-delta report-status0000", ) def test_send_pack_delete_only(self): self.rin.write( b"0063310ca9477129b8586fa2afc779c1f57cf64bba6c " b"refs/heads/master\x00report-status delete-refs ofs-delta\n" b"0000000eunpack ok\n" b"0019ok refs/heads/master\n" b"0000" ) self.rin.seek(0) def update_refs(refs): return {b"refs/heads/master": b"0" * 40} def generate_pack_data(have, want, ofs_delta=False): return 0, [] self.client.send_pack(b"/", update_refs, generate_pack_data) self.assertEqual( self.rout.getvalue(), b"008b310ca9477129b8586fa2afc779c1f57cf64bba6c " b"0000000000000000000000000000000000000000 " b"refs/heads/master\x00delete-refs ofs-delta report-status0000", ) def
test_send_pack_new_ref_only(self): self.rin.write( b"0063310ca9477129b8586fa2afc779c1f57cf64bba6c " b"refs/heads/master\x00report-status delete-refs ofs-delta\n" b"0000000eunpack ok\n" b"0019ok refs/heads/blah12\n" b"0000" ) self.rin.seek(0) def update_refs(refs): return { b"refs/heads/blah12": b"310ca9477129b8586fa2afc779c1f57cf64bba6c", b"refs/heads/master": b"310ca9477129b8586fa2afc779c1f57cf64bba6c", } def generate_pack_data(have, want, ofs_delta=False): return 0, [] f = BytesIO() write_pack_objects(f, {}) self.client.send_pack("/", update_refs, generate_pack_data) self.assertEqual( self.rout.getvalue(), b"008b0000000000000000000000000000000000000000 " b"310ca9477129b8586fa2afc779c1f57cf64bba6c " b"refs/heads/blah12\x00delete-refs ofs-delta report-status0000" + f.getvalue(), ) def test_send_pack_new_ref(self): self.rin.write( b"0064310ca9477129b8586fa2afc779c1f57cf64bba6c " b"refs/heads/master\x00 report-status delete-refs ofs-delta\n" b"0000000eunpack ok\n" b"0019ok refs/heads/blah12\n" b"0000" ) self.rin.seek(0) tree = Tree() commit = Commit() commit.tree = tree commit.parents = [] commit.author = commit.committer = b"test user" commit.commit_time = commit.author_time = 1174773719 commit.commit_timezone = commit.author_timezone = 0 commit.encoding = b"UTF-8" commit.message = b"test message" def update_refs(refs): return { b"refs/heads/blah12": commit.id, b"refs/heads/master": b"310ca9477129b8586fa2afc779c1f57cf64bba6c", } def generate_pack_data(have, want, ofs_delta=False): return pack_objects_to_data( [ (commit, None), (tree, b""), ] ) f = BytesIO() write_pack_data(f, *generate_pack_data(None, None)) self.client.send_pack(b"/", update_refs, generate_pack_data) self.assertEqual( self.rout.getvalue(), b"008b0000000000000000000000000000000000000000 " + commit.id + b" refs/heads/blah12\x00delete-refs ofs-delta report-status0000" + f.getvalue(), ) def test_send_pack_no_deleteref_delete_only(self): pkts = [ b"310ca9477129b8586fa2afc779c1f57cf64bba6c refs/heads/master" b"\x00 report-status ofs-delta\n", b"", b"", ] for pkt in pkts: if pkt == b"": self.rin.write(b"0000") else: self.rin.write(("%04x" % (len(pkt) + 4)).encode("ascii") + pkt) self.rin.seek(0) def update_refs(refs): return {b"refs/heads/master": b"0" * 40} def generate_pack_data(have, want, ofs_delta=False): return 0, [] result = self.client.send_pack(b"/", update_refs, generate_pack_data) self.assertEqual( result.ref_status, {b"refs/heads/master": "remote does not support deleting refs"}, ) self.assertEqual( result.refs, {b"refs/heads/master": b"310ca9477129b8586fa2afc779c1f57cf64bba6c"}, ) self.assertEqual(self.rout.getvalue(), b"0000") class TestGetTransportAndPath(TestCase): def test_tcp(self): c, path = get_transport_and_path("git://foo.com/bar/baz") self.assertTrue(isinstance(c, TCPGitClient)) self.assertEqual("foo.com", c._host) self.assertEqual(TCP_GIT_PORT, c._port) self.assertEqual("/bar/baz", path) def test_tcp_port(self): c, path = get_transport_and_path("git://foo.com:1234/bar/baz") self.assertTrue(isinstance(c, TCPGitClient)) self.assertEqual("foo.com", c._host) self.assertEqual(1234, c._port) self.assertEqual("/bar/baz", path) def test_git_ssh_explicit(self): c, path = get_transport_and_path("git+ssh://foo.com/bar/baz") self.assertTrue(isinstance(c, SSHGitClient)) self.assertEqual("foo.com", c.host) self.assertEqual(None, c.port) self.assertEqual(None, c.username) self.assertEqual("/bar/baz", path) def test_ssh_explicit(self): c, path = get_transport_and_path("ssh://foo.com/bar/baz") self.assertTrue(isinstance(c, 
SSHGitClient)) self.assertEqual("foo.com", c.host) self.assertEqual(None, c.port) self.assertEqual(None, c.username) self.assertEqual("/bar/baz", path) def test_ssh_port_explicit(self): c, path = get_transport_and_path("git+ssh://foo.com:1234/bar/baz") self.assertTrue(isinstance(c, SSHGitClient)) self.assertEqual("foo.com", c.host) self.assertEqual(1234, c.port) self.assertEqual("/bar/baz", path) def test_username_and_port_explicit_unknown_scheme(self): c, path = get_transport_and_path("unknown://git@server:7999/dply/stuff.git") self.assertTrue(isinstance(c, SSHGitClient)) self.assertEqual("unknown", c.host) self.assertEqual("//git@server:7999/dply/stuff.git", path) def test_username_and_port_explicit(self): c, path = get_transport_and_path("ssh://git@server:7999/dply/stuff.git") self.assertTrue(isinstance(c, SSHGitClient)) self.assertEqual("git", c.username) self.assertEqual("server", c.host) self.assertEqual(7999, c.port) self.assertEqual("/dply/stuff.git", path) def test_ssh_abspath_doubleslash(self): c, path = get_transport_and_path("git+ssh://foo.com//bar/baz") self.assertTrue(isinstance(c, SSHGitClient)) self.assertEqual("foo.com", c.host) self.assertEqual(None, c.port) self.assertEqual(None, c.username) self.assertEqual("//bar/baz", path) def test_ssh_port(self): c, path = get_transport_and_path("git+ssh://foo.com:1234/bar/baz") self.assertTrue(isinstance(c, SSHGitClient)) self.assertEqual("foo.com", c.host) self.assertEqual(1234, c.port) self.assertEqual("/bar/baz", path) def test_ssh_implicit(self): c, path = get_transport_and_path("foo:/bar/baz") self.assertTrue(isinstance(c, SSHGitClient)) self.assertEqual("foo", c.host) self.assertEqual(None, c.port) self.assertEqual(None, c.username) self.assertEqual("/bar/baz", path) def test_ssh_host(self): c, path = get_transport_and_path("foo.com:/bar/baz") self.assertTrue(isinstance(c, SSHGitClient)) self.assertEqual("foo.com", c.host) self.assertEqual(None, c.port) self.assertEqual(None, c.username) self.assertEqual("/bar/baz", path) def test_ssh_user_host(self): c, path = get_transport_and_path("user@foo.com:/bar/baz") self.assertTrue(isinstance(c, SSHGitClient)) self.assertEqual("foo.com", c.host) self.assertEqual(None, c.port) self.assertEqual("user", c.username) self.assertEqual("/bar/baz", path) def test_ssh_relpath(self): c, path = get_transport_and_path("foo:bar/baz") self.assertTrue(isinstance(c, SSHGitClient)) self.assertEqual("foo", c.host) self.assertEqual(None, c.port) self.assertEqual(None, c.username) self.assertEqual("bar/baz", path) def test_ssh_host_relpath(self): c, path = get_transport_and_path("foo.com:bar/baz") self.assertTrue(isinstance(c, SSHGitClient)) self.assertEqual("foo.com", c.host) self.assertEqual(None, c.port) self.assertEqual(None, c.username) self.assertEqual("bar/baz", path) def test_ssh_user_host_relpath(self): c, path = get_transport_and_path("user@foo.com:bar/baz") self.assertTrue(isinstance(c, SSHGitClient)) self.assertEqual("foo.com", c.host) self.assertEqual(None, c.port) self.assertEqual("user", c.username) self.assertEqual("bar/baz", path) def test_local(self): c, path = get_transport_and_path("foo.bar/baz") self.assertTrue(isinstance(c, LocalGitClient)) self.assertEqual("foo.bar/baz", path) @skipIf(sys.platform != "win32", "Behaviour only happens on windows.") def test_local_abs_windows_path(self): c, path = get_transport_and_path("C:\\foo.bar\\baz") self.assertTrue(isinstance(c, LocalGitClient)) self.assertEqual("C:\\foo.bar\\baz", path) def test_error(self): # Need to use a known 
urlparse.uses_netloc URL scheme to get the # expected parsing of the URL on Python versions less than 2.6.5 c, path = get_transport_and_path("prospero://bar/baz") self.assertTrue(isinstance(c, SSHGitClient)) def test_http(self): url = "https://github.com/jelmer/dulwich" c, path = get_transport_and_path(url) self.assertTrue(isinstance(c, HttpGitClient)) self.assertEqual("/jelmer/dulwich", path) def test_http_auth(self): url = "https://user:passwd@github.com/jelmer/dulwich" c, path = get_transport_and_path(url) self.assertTrue(isinstance(c, HttpGitClient)) self.assertEqual("/jelmer/dulwich", path) self.assertEqual("user", c._username) self.assertEqual("passwd", c._password) def test_http_auth_with_username(self): url = "https://github.com/jelmer/dulwich" c, path = get_transport_and_path(url, username="user2", password="blah") self.assertTrue(isinstance(c, HttpGitClient)) self.assertEqual("/jelmer/dulwich", path) self.assertEqual("user2", c._username) self.assertEqual("blah", c._password) def test_http_auth_with_username_and_in_url(self): url = "https://user:passwd@github.com/jelmer/dulwich" c, path = get_transport_and_path(url, username="user2", password="blah") self.assertTrue(isinstance(c, HttpGitClient)) self.assertEqual("/jelmer/dulwich", path) self.assertEqual("user", c._username) self.assertEqual("passwd", c._password) def test_http_no_auth(self): url = "https://github.com/jelmer/dulwich" c, path = get_transport_and_path(url) self.assertTrue(isinstance(c, HttpGitClient)) self.assertEqual("/jelmer/dulwich", path) self.assertIs(None, c._username) self.assertIs(None, c._password) class TestGetTransportAndPathFromUrl(TestCase): def test_tcp(self): c, path = get_transport_and_path_from_url("git://foo.com/bar/baz") self.assertTrue(isinstance(c, TCPGitClient)) self.assertEqual("foo.com", c._host) self.assertEqual(TCP_GIT_PORT, c._port) self.assertEqual("/bar/baz", path) def test_tcp_port(self): c, path = get_transport_and_path_from_url("git://foo.com:1234/bar/baz") self.assertTrue(isinstance(c, TCPGitClient)) self.assertEqual("foo.com", c._host) self.assertEqual(1234, c._port) self.assertEqual("/bar/baz", path) def test_ssh_explicit(self): c, path = get_transport_and_path_from_url("git+ssh://foo.com/bar/baz") self.assertTrue(isinstance(c, SSHGitClient)) self.assertEqual("foo.com", c.host) self.assertEqual(None, c.port) self.assertEqual(None, c.username) self.assertEqual("/bar/baz", path) def test_ssh_port_explicit(self): c, path = get_transport_and_path_from_url("git+ssh://foo.com:1234/bar/baz") self.assertTrue(isinstance(c, SSHGitClient)) self.assertEqual("foo.com", c.host) self.assertEqual(1234, c.port) self.assertEqual("/bar/baz", path) def test_ssh_homepath(self): c, path = get_transport_and_path_from_url("git+ssh://foo.com/~/bar/baz") self.assertTrue(isinstance(c, SSHGitClient)) self.assertEqual("foo.com", c.host) self.assertEqual(None, c.port) self.assertEqual(None, c.username) self.assertEqual("/~/bar/baz", path) def test_ssh_port_homepath(self): c, path = get_transport_and_path_from_url("git+ssh://foo.com:1234/~/bar/baz") self.assertTrue(isinstance(c, SSHGitClient)) self.assertEqual("foo.com", c.host) self.assertEqual(1234, c.port) self.assertEqual("/~/bar/baz", path) def test_ssh_host_relpath(self): self.assertRaises( ValueError, get_transport_and_path_from_url, "foo.com:bar/baz" ) def test_ssh_user_host_relpath(self): self.assertRaises( ValueError, get_transport_and_path_from_url, "user@foo.com:bar/baz" ) def test_local_path(self): self.assertRaises(ValueError, 
get_transport_and_path_from_url, "foo.bar/baz") def test_error(self): # Need to use a known urlparse.uses_netloc URL scheme to get the # expected parsing of the URL on Python versions less than 2.6.5 self.assertRaises( ValueError, get_transport_and_path_from_url, "prospero://bar/baz" ) def test_http(self): url = "https://github.com/jelmer/dulwich" c, path = get_transport_and_path_from_url(url) self.assertTrue(isinstance(c, HttpGitClient)) self.assertEqual("https://github.com", c.get_url(b"/")) self.assertEqual("/jelmer/dulwich", path) def test_http_port(self): url = "https://github.com:9090/jelmer/dulwich" c, path = get_transport_and_path_from_url(url) self.assertEqual("https://github.com:9090", c.get_url(b"/")) self.assertTrue(isinstance(c, HttpGitClient)) self.assertEqual("/jelmer/dulwich", path) def test_file(self): c, path = get_transport_and_path_from_url("file:///home/jelmer/foo") self.assertTrue(isinstance(c, LocalGitClient)) self.assertEqual("/home/jelmer/foo", path) class TestSSHVendor(object): def __init__(self): self.host = None self.command = "" self.username = None self.port = None self.password = None self.key_filename = None def run_command( - self, host, command, username=None, port=None, password=None, key_filename=None + self, + host, + command, + username=None, + port=None, + password=None, + key_filename=None, ): self.host = host self.command = command self.username = username self.port = port self.password = password self.key_filename = key_filename class Subprocess: pass setattr(Subprocess, "read", lambda: None) setattr(Subprocess, "write", lambda: None) setattr(Subprocess, "close", lambda: None) setattr(Subprocess, "can_read", lambda: None) return Subprocess() class SSHGitClientTests(TestCase): def setUp(self): super(SSHGitClientTests, self).setUp() self.server = TestSSHVendor() self.real_vendor = client.get_ssh_vendor client.get_ssh_vendor = lambda: self.server self.client = SSHGitClient("git.samba.org") def tearDown(self): super(SSHGitClientTests, self).tearDown() client.get_ssh_vendor = self.real_vendor def test_get_url(self): path = "/tmp/repo.git" c = SSHGitClient("git.samba.org") url = c.get_url(path) self.assertEqual("ssh://git.samba.org/tmp/repo.git", url) def test_get_url_with_username_and_port(self): path = "/tmp/repo.git" c = SSHGitClient("git.samba.org", port=2222, username="user") url = c.get_url(path) self.assertEqual("ssh://user@git.samba.org:2222/tmp/repo.git", url) def test_default_command(self): self.assertEqual(b"git-upload-pack", self.client._get_cmd_path(b"upload-pack")) def test_alternative_command_path(self): self.client.alternative_paths[b"upload-pack"] = b"/usr/lib/git/git-upload-pack" self.assertEqual( - b"/usr/lib/git/git-upload-pack", self.client._get_cmd_path(b"upload-pack") + b"/usr/lib/git/git-upload-pack", + self.client._get_cmd_path(b"upload-pack"), ) def test_alternative_command_path_spaces(self): self.client.alternative_paths[ b"upload-pack" ] = b"/usr/lib/git/git-upload-pack -ibla" self.assertEqual( b"/usr/lib/git/git-upload-pack -ibla", self.client._get_cmd_path(b"upload-pack"), ) def test_connect(self): server = self.server client = self.client client.username = b"username" client.port = 1337 client._connect(b"command", b"/path/to/repo") self.assertEqual(b"username", server.username) self.assertEqual(1337, server.port) self.assertEqual("git-command '/path/to/repo'", server.command) client._connect(b"relative-command", b"/~/path/to/repo") self.assertEqual("git-relative-command '~/path/to/repo'", server.command) class 
ReportStatusParserTests(TestCase): def test_invalid_pack(self): parser = ReportStatusParser() parser.handle_packet(b"unpack error - foo bar") parser.handle_packet(b"ok refs/foo/bar") parser.handle_packet(None) self.assertRaises(SendPackError, list, parser.check()) def test_update_refs_error(self): parser = ReportStatusParser() parser.handle_packet(b"unpack ok") parser.handle_packet(b"ng refs/foo/bar need to pull") parser.handle_packet(None) self.assertEqual([(b"refs/foo/bar", "need to pull")], list(parser.check())) def test_ok(self): parser = ReportStatusParser() parser.handle_packet(b"unpack ok") parser.handle_packet(b"ok refs/foo/bar") parser.handle_packet(None) self.assertEqual([(b"refs/foo/bar", None)], list(parser.check())) class LocalGitClientTests(TestCase): def test_get_url(self): path = "/tmp/repo.git" c = LocalGitClient() url = c.get_url(path) self.assertEqual("file:///tmp/repo.git", url) def test_fetch_into_empty(self): c = LocalGitClient() t = MemoryRepo() s = open_repo("a.git") self.addCleanup(tear_down_repo, s) self.assertEqual(s.get_refs(), c.fetch(s.path, t).refs) def test_fetch_empty(self): c = LocalGitClient() s = open_repo("a.git") self.addCleanup(tear_down_repo, s) out = BytesIO() walker = {} ret = c.fetch_pack( s.path, lambda heads: [], graph_walker=walker, pack_data=out.write ) self.assertEqual( { b"HEAD": b"a90fa2d900a17e99b433217e988c4eb4a2e9a097", b"refs/heads/master": b"a90fa2d900a17e99b433217e988c4eb4a2e9a097", b"refs/tags/mytag": b"28237f4dc30d0d462658d6b937b08a0f0b6ef55a", b"refs/tags/mytag-packed": b"b0931cadc54336e78a1d980420e3268903b57a50", }, ret.refs, ) self.assertEqual({b"HEAD": b"refs/heads/master"}, ret.symrefs) self.assertEqual( b"PACK\x00\x00\x00\x02\x00\x00\x00\x00\x02\x9d\x08" b"\x82;\xd8\xa8\xea\xb5\x10\xadj\xc7\\\x82<\xfd>\xd3\x1e", out.getvalue(), ) def test_fetch_pack_none(self): c = LocalGitClient() s = open_repo("a.git") self.addCleanup(tear_down_repo, s) out = BytesIO() walker = MemoryRepo().get_graph_walker() ret = c.fetch_pack( s.path, lambda heads: [b"a90fa2d900a17e99b433217e988c4eb4a2e9a097"], graph_walker=walker, pack_data=out.write, ) self.assertEqual({b"HEAD": b"refs/heads/master"}, ret.symrefs) self.assertEqual( { b"HEAD": b"a90fa2d900a17e99b433217e988c4eb4a2e9a097", b"refs/heads/master": b"a90fa2d900a17e99b433217e988c4eb4a2e9a097", b"refs/tags/mytag": b"28237f4dc30d0d462658d6b937b08a0f0b6ef55a", b"refs/tags/mytag-packed": b"b0931cadc54336e78a1d980420e3268903b57a50", }, ret.refs, ) # Hardcoding is not ideal, but we'll fix that some other day.. 
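# (The expected prefix is the pack header: "PACK" magic, format version 2, # then a big-endian object count of 7.)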
self.assertTrue( out.getvalue().startswith(b"PACK\x00\x00\x00\x02\x00\x00\x00\x07") ) def test_send_pack_without_changes(self): local = open_repo("a.git") self.addCleanup(tear_down_repo, local) target = open_repo("a.git") self.addCleanup(tear_down_repo, target) self.send_and_verify(b"master", local, target) def test_send_pack_with_changes(self): local = open_repo("a.git") self.addCleanup(tear_down_repo, local) target_path = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, target_path) with Repo.init_bare(target_path) as target: self.send_and_verify(b"master", local, target) def test_get_refs(self): local = open_repo("refs.git") self.addCleanup(tear_down_repo, local) client = LocalGitClient() refs = client.get_refs(local.path) self.assertDictEqual(local.refs.as_dict(), refs) def send_and_verify(self, branch, local, target): """Send branch from local to remote repository and verify it worked.""" client = LocalGitClient() ref_name = b"refs/heads/" + branch result = client.send_pack( target.path, lambda _: {ref_name: local.refs[ref_name]}, local.generate_pack_data, ) self.assertEqual(local.refs[ref_name], result.refs[ref_name]) self.assertIs(None, result.agent) self.assertEqual({}, result.ref_status) obj_local = local.get_object(result.refs[ref_name]) obj_target = target.get_object(result.refs[ref_name]) self.assertEqual(obj_local, obj_target) class HttpGitClientTests(TestCase): def test_get_url(self): base_url = "https://github.com/jelmer/dulwich" path = "/jelmer/dulwich" c = HttpGitClient(base_url) url = c.get_url(path) self.assertEqual("https://github.com/jelmer/dulwich", url) def test_get_url_bytes_path(self): base_url = "https://github.com/jelmer/dulwich" path_bytes = b"/jelmer/dulwich" c = HttpGitClient(base_url) url = c.get_url(path_bytes) self.assertEqual("https://github.com/jelmer/dulwich", url) def test_get_url_with_username_and_passwd(self): base_url = "https://github.com/jelmer/dulwich" path = "/jelmer/dulwich" c = HttpGitClient(base_url, username="USERNAME", password="PASSWD") url = c.get_url(path) self.assertEqual("https://github.com/jelmer/dulwich", url) def test_init_username_passwd_set(self): url = "https://github.com/jelmer/dulwich" c = HttpGitClient(url, config=None, username="user", password="passwd") self.assertEqual("user", c._username) self.assertEqual("passwd", c._password) basic_auth = c.pool_manager.headers["authorization"] auth_string = "%s:%s" % ("user", "passwd") b64_credentials = base64.b64encode(auth_string.encode("latin1")) expected_basic_auth = "Basic %s" % b64_credentials.decode("latin1") self.assertEqual(basic_auth, expected_basic_auth) def test_init_no_username_passwd(self): url = "https://github.com/jelmer/dulwich" c = HttpGitClient(url, config=None) self.assertIs(None, c._username) self.assertIs(None, c._password) self.assertNotIn("authorization", c.pool_manager.headers) def test_from_parsedurl_on_url_with_quoted_credentials(self): original_username = "john|the|first" quoted_username = urlquote(original_username) original_password = "Ya#1$2%3" quoted_password = urlquote(original_password) url = "https://{username}:{password}@github.com/jelmer/dulwich".format( username=quoted_username, password=quoted_password ) c = HttpGitClient.from_parsedurl(urlparse(url)) self.assertEqual(original_username, c._username) self.assertEqual(original_password, c._password) basic_auth = c.pool_manager.headers["authorization"] auth_string = "%s:%s" % (original_username, original_password) b64_credentials = base64.b64encode(auth_string.encode("latin1")) expected_basic_auth = 
"Basic %s" % b64_credentials.decode("latin1") self.assertEqual(basic_auth, expected_basic_auth) def test_url_redirect_location(self): from urllib3.response import HTTPResponse test_data = { "https://gitlab.com/inkscape/inkscape/": { "redirect_url": "https://gitlab.com/inkscape/inkscape.git/", "refs_data": ( b"001e# service=git-upload-pack\n00000032" b"fb2bebf4919a011f0fd7cec085443d0031228e76 " b"HEAD\n0000" ), }, "https://github.com/jelmer/dulwich/": { "redirect_url": "https://github.com/jelmer/dulwich/", "refs_data": ( b"001e# service=git-upload-pack\n00000032" b"3ff25e09724aa4d86ea5bca7d5dd0399a3c8bfcf " b"HEAD\n0000" ), }, } tail = "info/refs?service=git-upload-pack" # we need to mock urllib3.PoolManager as this test will fail # otherwise without an active internet connection class PoolManagerMock: def __init__(self): self.headers = {} def request(self, method, url, fields=None, headers=None, redirect=True): base_url = url[: -len(tail)] redirect_base_url = test_data[base_url]["redirect_url"] redirect_url = redirect_base_url + tail headers = { "Content-Type": "application/x-git-upload-pack-advertisement" } body = test_data[base_url]["refs_data"] # urllib3 handles automatic redirection by default status = 200 request_url = redirect_url # simulate urllib3 behavior when redirect parameter is False if redirect is False: request_url = url if redirect_base_url != base_url: body = "" headers["location"] = redirect_url status = 301 return HTTPResponse( body=body, headers=headers, request_method=method, request_url=request_url, status=status, ) pool_manager = PoolManagerMock() for base_url in test_data.keys(): # instantiate HttpGitClient with mocked pool manager c = HttpGitClient(base_url, pool_manager=pool_manager, config=None) # call method that detects url redirection _, _, processed_url = c._discover_references(b"git-upload-pack", base_url) # send the same request as the method above without redirection resp = c.pool_manager.request("GET", base_url + tail, redirect=False) # check expected behavior of urllib3 redirect_location = resp.get_redirect_location() if resp.status == 200: self.assertFalse(redirect_location) if redirect_location: # check that url redirection has been correctly detected self.assertEqual(processed_url, redirect_location[: -len(tail)]) else: # check also the no redirection case self.assertEqual(processed_url, base_url) class TCPGitClientTests(TestCase): def test_get_url(self): host = "github.com" path = "/jelmer/dulwich" c = TCPGitClient(host) url = c.get_url(path) self.assertEqual("git://github.com/jelmer/dulwich", url) def test_get_url_with_port(self): host = "github.com" path = "/jelmer/dulwich" port = 9090 c = TCPGitClient(host, port=port) url = c.get_url(path) self.assertEqual("git://github.com:9090/jelmer/dulwich", url) class DefaultUrllib3ManagerTest(TestCase): def test_no_config(self): manager = default_urllib3_manager(config=None) self.assertEqual(manager.connection_pool_kw["cert_reqs"], "CERT_REQUIRED") def test_config_no_proxy(self): import urllib3 manager = default_urllib3_manager(config=ConfigDict()) self.assertNotIsInstance(manager, urllib3.ProxyManager) self.assertIsInstance(manager, urllib3.PoolManager) def test_config_no_proxy_custom_cls(self): import urllib3 class CustomPoolManager(urllib3.PoolManager): pass manager = default_urllib3_manager( config=ConfigDict(), pool_manager_cls=CustomPoolManager ) self.assertIsInstance(manager, CustomPoolManager) def test_config_ssl(self): config = ConfigDict() config.set(b"http", b"sslVerify", b"true") manager = 
default_urllib3_manager(config=config) self.assertEqual(manager.connection_pool_kw["cert_reqs"], "CERT_REQUIRED") def test_config_no_ssl(self): config = ConfigDict() config.set(b"http", b"sslVerify", b"false") manager = default_urllib3_manager(config=config) self.assertEqual(manager.connection_pool_kw["cert_reqs"], "CERT_NONE") def test_config_proxy(self): import urllib3 config = ConfigDict() config.set(b"http", b"proxy", b"http://localhost:3128/") manager = default_urllib3_manager(config=config) self.assertIsInstance(manager, urllib3.ProxyManager) self.assertTrue(hasattr(manager, "proxy")) self.assertEqual(manager.proxy.scheme, "http") self.assertEqual(manager.proxy.host, "localhost") self.assertEqual(manager.proxy.port, 3128) def test_environment_proxy(self): import urllib3 config = ConfigDict() os.environ["http_proxy"] = "http://myproxy:8080" manager = default_urllib3_manager(config=config) self.assertIsInstance(manager, urllib3.ProxyManager) self.assertTrue(hasattr(manager, "proxy")) self.assertEqual(manager.proxy.scheme, "http") self.assertEqual(manager.proxy.host, "myproxy") self.assertEqual(manager.proxy.port, 8080) del os.environ["http_proxy"] def test_config_proxy_custom_cls(self): import urllib3 class CustomProxyManager(urllib3.ProxyManager): pass config = ConfigDict() config.set(b"http", b"proxy", b"http://localhost:3128/") manager = default_urllib3_manager( config=config, proxy_manager_cls=CustomProxyManager ) self.assertIsInstance(manager, CustomProxyManager) def test_config_no_verify_ssl(self): manager = default_urllib3_manager(config=None, cert_reqs="CERT_NONE") self.assertEqual(manager.connection_pool_kw["cert_reqs"], "CERT_NONE") class SubprocessSSHVendorTests(TestCase): def setUp(self): # Monkey Patch client subprocess popen self._orig_popen = dulwich.client.subprocess.Popen dulwich.client.subprocess.Popen = DummyPopen def tearDown(self): dulwich.client.subprocess.Popen = self._orig_popen def test_run_command_dashes(self): vendor = SubprocessSSHVendor() self.assertRaises( - StrangeHostname, vendor.run_command, "--weird-host", "git-clone-url" + StrangeHostname, + vendor.run_command, + "--weird-host", + "git-clone-url", ) def test_run_command_password(self): vendor = SubprocessSSHVendor() self.assertRaises( NotImplementedError, vendor.run_command, "host", "git-clone-url", password="12345", ) def test_run_command_password_and_privkey(self): vendor = SubprocessSSHVendor() self.assertRaises( NotImplementedError, vendor.run_command, "host", "git-clone-url", password="12345", key_filename="/tmp/id_rsa", ) def test_run_command_with_port_username_and_privkey(self): expected = [ "ssh", "-x", "-p", "2200", "-i", "/tmp/id_rsa", "user@host", "git-clone-url", ] vendor = SubprocessSSHVendor() command = vendor.run_command( "host", "git-clone-url", username="user", port="2200", key_filename="/tmp/id_rsa", ) args = command.proc.args self.assertListEqual(expected, args[0]) class PLinkSSHVendorTests(TestCase): def setUp(self): # Monkey Patch client subprocess popen self._orig_popen = dulwich.client.subprocess.Popen dulwich.client.subprocess.Popen = DummyPopen def tearDown(self): dulwich.client.subprocess.Popen = self._orig_popen def test_run_command_dashes(self): vendor = PLinkSSHVendor() self.assertRaises( - StrangeHostname, vendor.run_command, "--weird-host", "git-clone-url" + StrangeHostname, + vendor.run_command, + "--weird-host", + "git-clone-url", ) def test_run_command_password_and_privkey(self): vendor = PLinkSSHVendor() warnings.simplefilter("always", UserWarning) 
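# Note (editorial comment, not in the upstream test): PLink receives the password on its command line via -pw, where it is visible in the process list, so run_command is expected to emit a UserWarning; the catcher installed below records it for the assertions that follow.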
self.addCleanup(warnings.resetwarnings) warnings_list, restore_warnings = setup_warning_catcher() self.addCleanup(restore_warnings) command = vendor.run_command( - "host", "git-clone-url", password="12345", key_filename="/tmp/id_rsa" + "host", + "git-clone-url", + password="12345", + key_filename="/tmp/id_rsa", ) expected_warning = UserWarning( "Invoking PLink with a password exposes the password in the " "process list." ) for w in warnings_list: if type(w) == type(expected_warning) and w.args == expected_warning.args: break else: raise AssertionError( "Expected warning %r not in %r" % (expected_warning, warnings_list) ) args = command.proc.args if sys.platform == "win32": binary = ["plink.exe", "-ssh"] else: binary = ["plink", "-ssh"] expected = binary + [ "-pw", "12345", "-i", "/tmp/id_rsa", "host", "git-clone-url", ] self.assertListEqual(expected, args[0]) def test_run_command_password(self): if sys.platform == "win32": binary = ["plink.exe", "-ssh"] else: binary = ["plink", "-ssh"] expected = binary + ["-pw", "12345", "host", "git-clone-url"] vendor = PLinkSSHVendor() warnings.simplefilter("always", UserWarning) self.addCleanup(warnings.resetwarnings) warnings_list, restore_warnings = setup_warning_catcher() self.addCleanup(restore_warnings) command = vendor.run_command("host", "git-clone-url", password="12345") expected_warning = UserWarning( "Invoking PLink with a password exposes the password in the " "process list." ) for w in warnings_list: if type(w) == type(expected_warning) and w.args == expected_warning.args: break else: raise AssertionError( "Expected warning %r not in %r" % (expected_warning, warnings_list) ) args = command.proc.args self.assertListEqual(expected, args[0]) def test_run_command_with_port_username_and_privkey(self): if sys.platform == "win32": binary = ["plink.exe", "-ssh"] else: binary = ["plink", "-ssh"] expected = binary + [ "-P", "2200", "-i", "/tmp/id_rsa", "user@host", "git-clone-url", ] vendor = PLinkSSHVendor() command = vendor.run_command( "host", "git-clone-url", username="user", port="2200", key_filename="/tmp/id_rsa", ) args = command.proc.args self.assertListEqual(expected, args[0]) class RsyncUrlTests(TestCase): def test_simple(self): self.assertEqual(parse_rsync_url("foo:bar/path"), (None, "foo", "bar/path")) self.assertEqual( parse_rsync_url("user@foo:bar/path"), ("user", "foo", "bar/path") ) def test_path(self): self.assertRaises(ValueError, parse_rsync_url, "/path") class CheckWantsTests(TestCase): def test_fine(self): check_wants( [b"2f3dc7a53fb752a6961d3a56683df46d4d3bf262"], {b"refs/heads/blah": b"2f3dc7a53fb752a6961d3a56683df46d4d3bf262"}, ) def test_missing(self): self.assertRaises( InvalidWants, check_wants, [b"2f3dc7a53fb752a6961d3a56683df46d4d3bf262"], {b"refs/heads/blah": b"3f3dc7a53fb752a6961d3a56683df46d4d3bf262"}, ) def test_annotated(self): self.assertRaises( InvalidWants, check_wants, [b"2f3dc7a53fb752a6961d3a56683df46d4d3bf262"], { b"refs/heads/blah": b"3f3dc7a53fb752a6961d3a56683df46d4d3bf262", b"refs/heads/blah^{}": b"2f3dc7a53fb752a6961d3a56683df46d4d3bf262", }, ) class FetchPackResultTests(TestCase): def test_eq(self): self.assertEqual( FetchPackResult( {b"refs/heads/master": b"2f3dc7a53fb752a6961d3a56683df46d4d3bf262"}, {}, b"user/agent", ), FetchPackResult( {b"refs/heads/master": b"2f3dc7a53fb752a6961d3a56683df46d4d3bf262"}, {}, b"user/agent", ), ) class GitCredentialStoreTests(TestCase): @classmethod def setUpClass(cls): with tempfile.NamedTemporaryFile(delete=False) as f: f.write(b"https://user:pass@example.org\n") 
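# Note (editorial comment): the temporary file mimics git-credential-store's format, one URL per line with the credentials embedded, e.g. https://user:pass@example.org.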
cls.fname = f.name @classmethod def tearDownClass(cls): os.unlink(cls.fname) def test_nonmatching_scheme(self): self.assertEqual( get_credentials_from_store(b"http", b"example.org", fnames=[self.fname]), None, ) def test_nonmatching_hostname(self): self.assertEqual( get_credentials_from_store(b"https", b"noentry.org", fnames=[self.fname]), None, ) def test_match_without_username(self): self.assertEqual( get_credentials_from_store(b"https", b"example.org", fnames=[self.fname]), (b"user", b"pass"), ) def test_match_with_matching_username(self): self.assertEqual( get_credentials_from_store( b"https", b"example.org", b"user", fnames=[self.fname] ), (b"user", b"pass"), ) def test_no_match_with_nonmatching_username(self): self.assertEqual( get_credentials_from_store( b"https", b"example.org", b"otheruser", fnames=[self.fname] ), None, ) class RemoteErrorFromStderrTests(TestCase): def test_nothing(self): self.assertEqual(_remote_error_from_stderr(None), HangupException()) def test_error_line(self): b = BytesIO( b"""\ This is some random output. ERROR: This is the actual error with a tail """ ) self.assertEqual( - _remote_error_from_stderr(b), GitProtocolError("This is the actual error") + _remote_error_from_stderr(b), + GitProtocolError("This is the actual error"), ) def test_no_error_line(self): b = BytesIO( b"""\ This is output without an error line. And this line is just random noise, too. """ ) self.assertEqual( _remote_error_from_stderr(b), HangupException( [ b"This is output without an error line.", b"And this line is just random noise, too.", ] ), ) diff --git a/dulwich/tests/test_config.py b/dulwich/tests/test_config.py index bd3dddd9..618b9b7c 100644 --- a/dulwich/tests/test_config.py +++ b/dulwich/tests/test_config.py @@ -1,357 +1,363 @@ # test_config.py -- Tests for reading and writing configuration files # Copyright (C) 2011 Jelmer Vernooij <jelmer@samba.org> # # Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU # General Public License as public by the Free Software Foundation; version 2.0 # or (at your option) any later version. You can redistribute it and/or # modify it under the terms of either of these two licenses. # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # You should have received a copy of the licenses; if not, see # <http://www.gnu.org/licenses/> for a copy of the GNU General Public License # and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache # License, Version 2.0.
# """Tests for reading and writing configuration files.""" from io import BytesIO from dulwich.config import ( ConfigDict, ConfigFile, StackedConfig, _check_section_name, _check_variable_name, _format_string, _escape_value, _parse_string, parse_submodules, ) from dulwich.tests import ( TestCase, ) class ConfigFileTests(TestCase): def from_file(self, text): return ConfigFile.from_file(BytesIO(text)) def test_empty(self): ConfigFile() def test_eq(self): self.assertEqual(ConfigFile(), ConfigFile()) def test_default_config(self): cf = self.from_file( b"""[core] \trepositoryformatversion = 0 \tfilemode = true \tbare = false \tlogallrefupdates = true """ ) self.assertEqual( ConfigFile( { (b"core",): { b"repositoryformatversion": b"0", b"filemode": b"true", b"bare": b"false", b"logallrefupdates": b"true", } } ), cf, ) def test_from_file_empty(self): cf = self.from_file(b"") self.assertEqual(ConfigFile(), cf) def test_empty_line_before_section(self): cf = self.from_file(b"\n[section]\n") self.assertEqual(ConfigFile({(b"section",): {}}), cf) def test_comment_before_section(self): cf = self.from_file(b"# foo\n[section]\n") self.assertEqual(ConfigFile({(b"section",): {}}), cf) def test_comment_after_section(self): cf = self.from_file(b"[section] # foo\n") self.assertEqual(ConfigFile({(b"section",): {}}), cf) def test_comment_after_variable(self): cf = self.from_file(b"[section]\nbar= foo # a comment\n") self.assertEqual(ConfigFile({(b"section",): {b"bar": b"foo"}}), cf) def test_comment_character_within_value_string(self): cf = self.from_file(b'[section]\nbar= "foo#bar"\n') self.assertEqual(ConfigFile({(b"section",): {b"bar": b"foo#bar"}}), cf) def test_comment_character_within_section_string(self): cf = self.from_file(b'[branch "foo#bar"] # a comment\nbar= foo\n') self.assertEqual(ConfigFile({(b"branch", b"foo#bar"): {b"bar": b"foo"}}), cf) def test_from_file_section(self): cf = self.from_file(b"[core]\nfoo = bar\n") self.assertEqual(b"bar", cf.get((b"core",), b"foo")) self.assertEqual(b"bar", cf.get((b"core", b"foo"), b"foo")) def test_from_file_section_case_insensitive_lower(self): cf = self.from_file(b"[cOre]\nfOo = bar\n") self.assertEqual(b"bar", cf.get((b"core",), b"foo")) self.assertEqual(b"bar", cf.get((b"core", b"foo"), b"foo")) def test_from_file_section_case_insensitive_mixed(self): cf = self.from_file(b"[cOre]\nfOo = bar\n") self.assertEqual(b"bar", cf.get((b"core",), b"fOo")) self.assertEqual(b"bar", cf.get((b"cOre", b"fOo"), b"fOo")) def test_from_file_with_mixed_quoted(self): cf = self.from_file(b'[core]\nfoo = "bar"la\n') self.assertEqual(b"barla", cf.get((b"core",), b"foo")) def test_from_file_section_with_open_brackets(self): self.assertRaises(ValueError, self.from_file, b"[core\nfoo = bar\n") def test_from_file_value_with_open_quoted(self): self.assertRaises(ValueError, self.from_file, b'[core]\nfoo = "bar\n') def test_from_file_with_quotes(self): cf = self.from_file(b"[core]\n" b'foo = " bar"\n') self.assertEqual(b" bar", cf.get((b"core",), b"foo")) def test_from_file_with_interrupted_line(self): cf = self.from_file(b"[core]\n" b"foo = bar\\\n" b" la\n") self.assertEqual(b"barla", cf.get((b"core",), b"foo")) def test_from_file_with_boolean_setting(self): cf = self.from_file(b"[core]\n" b"foo\n") self.assertEqual(b"true", cf.get((b"core",), b"foo")) def test_from_file_subsection(self): cf = self.from_file(b'[branch "foo"]\nfoo = bar\n') self.assertEqual(b"bar", cf.get((b"branch", b"foo"), b"foo")) def test_from_file_subsection_invalid(self): self.assertRaises(ValueError, 
self.from_file, b'[branch "foo]\nfoo = bar\n') def test_from_file_subsection_not_quoted(self): cf = self.from_file(b"[branch.foo]\nfoo = bar\n") self.assertEqual(b"bar", cf.get((b"branch", b"foo"), b"foo")) def test_write_to_file_empty(self): c = ConfigFile() f = BytesIO() c.write_to_file(f) self.assertEqual(b"", f.getvalue()) def test_write_to_file_section(self): c = ConfigFile() c.set((b"core",), b"foo", b"bar") f = BytesIO() c.write_to_file(f) self.assertEqual(b"[core]\n\tfoo = bar\n", f.getvalue()) def test_write_to_file_subsection(self): c = ConfigFile() c.set((b"branch", b"blie"), b"foo", b"bar") f = BytesIO() c.write_to_file(f) self.assertEqual(b'[branch "blie"]\n\tfoo = bar\n', f.getvalue()) def test_same_line(self): cf = self.from_file(b"[branch.foo] foo = bar\n") self.assertEqual(b"bar", cf.get((b"branch", b"foo"), b"foo")) def test_quoted(self): cf = self.from_file( b"""[gui] \tfontdiff = -family \\\"Ubuntu Mono\\\" -size 11 -overstrike 0 """ ) self.assertEqual( ConfigFile( { (b"gui",): { b"fontdiff": b'-family "Ubuntu Mono" -size 11 -overstrike 0', } } ), cf, ) def test_quoted_multiline(self): cf = self.from_file( b"""[alias] who = \"!who() {\\ git log --no-merges --pretty=format:'%an - %ae' $@ | uniq -c | sort -rn;\\ };\\ who\" """ ) self.assertEqual( ConfigFile( { (b"alias",): { b"who": ( b"!who() {git log --no-merges --pretty=format:'%an - " b"%ae' $@ | uniq -c | sort -rn;};who" ) } } ), cf, ) def test_set_hash_gets_quoted(self): c = ConfigFile() c.set(b"xandikos", b"color", b"#665544") f = BytesIO() c.write_to_file(f) self.assertEqual(b'[xandikos]\n\tcolor = "#665544"\n', f.getvalue()) class ConfigDictTests(TestCase): def test_get_set(self): cd = ConfigDict() self.assertRaises(KeyError, cd.get, b"foo", b"core") cd.set((b"core",), b"foo", b"bla") self.assertEqual(b"bla", cd.get((b"core",), b"foo")) cd.set((b"core",), b"foo", b"bloe") self.assertEqual(b"bloe", cd.get((b"core",), b"foo")) def test_get_boolean(self): cd = ConfigDict() cd.set((b"core",), b"foo", b"true") self.assertTrue(cd.get_boolean((b"core",), b"foo")) cd.set((b"core",), b"foo", b"false") self.assertFalse(cd.get_boolean((b"core",), b"foo")) cd.set((b"core",), b"foo", b"invalid") self.assertRaises(ValueError, cd.get_boolean, (b"core",), b"foo") def test_dict(self): cd = ConfigDict() cd.set((b"core",), b"foo", b"bla") cd.set((b"core2",), b"foo", b"bloe") self.assertEqual([(b"core",), (b"core2",)], list(cd.keys())) self.assertEqual(cd[(b"core",)], {b"foo": b"bla"}) cd[b"a"] = b"b" self.assertEqual(cd[b"a"], b"b") def test_iteritems(self): cd = ConfigDict() cd.set((b"core",), b"foo", b"bla") cd.set((b"core2",), b"foo", b"bloe") self.assertEqual([(b"foo", b"bla")], list(cd.iteritems((b"core",)))) def test_iteritems_nonexistant(self): cd = ConfigDict() cd.set((b"core2",), b"foo", b"bloe") self.assertEqual([], list(cd.iteritems((b"core",)))) def test_itersections(self): cd = ConfigDict() cd.set((b"core2",), b"foo", b"bloe") self.assertEqual([(b"core2",)], list(cd.itersections())) class StackedConfigTests(TestCase): def test_default_backends(self): StackedConfig.default_backends() class EscapeValueTests(TestCase): def test_nothing(self): self.assertEqual(b"foo", _escape_value(b"foo")) def test_backslash(self): self.assertEqual(b"foo\\\\", _escape_value(b"foo\\")) def test_newline(self): self.assertEqual(b"foo\\n", _escape_value(b"foo\n")) class FormatStringTests(TestCase): def test_quoted(self): self.assertEqual(b'" foo"', _format_string(b" foo")) self.assertEqual(b'"\\tfoo"', _format_string(b"\tfoo")) def 
test_not_quoted(self): self.assertEqual(b"foo", _format_string(b"foo")) self.assertEqual(b"foo bar", _format_string(b"foo bar")) class ParseStringTests(TestCase): def test_quoted(self): self.assertEqual(b" foo", _parse_string(b'" foo"')) self.assertEqual(b"\tfoo", _parse_string(b'"\\tfoo"')) def test_not_quoted(self): self.assertEqual(b"foo", _parse_string(b"foo")) self.assertEqual(b"foo bar", _parse_string(b"foo bar")) def test_nothing(self): self.assertEqual(b"", _parse_string(b"")) def test_tab(self): self.assertEqual(b"\tbar\t", _parse_string(b"\\tbar\\t")) def test_newline(self): self.assertEqual(b"\nbar\t", _parse_string(b"\\nbar\\t\t")) def test_quote(self): self.assertEqual(b'"foo"', _parse_string(b'\\"foo\\"')) class CheckVariableNameTests(TestCase): def test_invalid(self): self.assertFalse(_check_variable_name(b"foo ")) self.assertFalse(_check_variable_name(b"bar,bar")) self.assertFalse(_check_variable_name(b"bar.bar")) def test_valid(self): self.assertTrue(_check_variable_name(b"FOO")) self.assertTrue(_check_variable_name(b"foo")) self.assertTrue(_check_variable_name(b"foo-bar")) class CheckSectionNameTests(TestCase): def test_invalid(self): self.assertFalse(_check_section_name(b"foo ")) self.assertFalse(_check_section_name(b"bar,bar")) def test_valid(self): self.assertTrue(_check_section_name(b"FOO")) self.assertTrue(_check_section_name(b"foo")) self.assertTrue(_check_section_name(b"foo-bar")) self.assertTrue(_check_section_name(b"bar.bar")) class SubmodulesTests(TestCase): def testSubmodules(self): cf = ConfigFile.from_file( BytesIO( b"""\ [submodule "core/lib"] \tpath = core/lib \turl = https://github.com/phhusson/QuasselC.git """ ) ) got = list(parse_submodules(cf)) self.assertEqual( - [(b"core/lib", b"https://github.com/phhusson/QuasselC.git", b"core/lib")], + [ + ( + b"core/lib", + b"https://github.com/phhusson/QuasselC.git", + b"core/lib", + ) + ], got, ) diff --git a/dulwich/tests/test_diff_tree.py b/dulwich/tests/test_diff_tree.py index bd53ea89..20cecaad 100644 --- a/dulwich/tests/test_diff_tree.py +++ b/dulwich/tests/test_diff_tree.py @@ -1,1138 +1,1157 @@ # test_diff_tree.py -- Tests for file and tree diff utilities. # Copyright (C) 2010 Google, Inc. # # Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU # General Public License as public by the Free Software Foundation; version 2.0 # or (at your option) any later version. You can redistribute it and/or # modify it under the terms of either of these two licenses. # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # You should have received a copy of the licenses; if not, see # <http://www.gnu.org/licenses/> for a copy of the GNU General Public License # and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache # License, Version 2.0.
# """Tests for file and tree diff utilities.""" from itertools import permutations from dulwich.diff_tree import ( CHANGE_MODIFY, CHANGE_RENAME, CHANGE_COPY, CHANGE_UNCHANGED, TreeChange, _merge_entries, _merge_entries_py, tree_changes, tree_changes_for_merge, _count_blocks, _count_blocks_py, _similarity_score, _tree_change_key, RenameDetector, _is_tree, _is_tree_py, ) from dulwich.index import ( commit_tree, ) from dulwich.object_store import ( MemoryObjectStore, ) from dulwich.objects import ( ShaFile, Blob, TreeEntry, Tree, ) from dulwich.tests import ( TestCase, ) from dulwich.tests.utils import ( F, make_object, functest_builder, ext_functest_builder, ) class DiffTestCase(TestCase): def setUp(self): super(DiffTestCase, self).setUp() self.store = MemoryObjectStore() self.empty_tree = self.commit_tree([]) def commit_tree(self, entries): commit_blobs = [] for entry in entries: if len(entry) == 2: path, obj = entry mode = F else: path, obj, mode = entry if isinstance(obj, Blob): self.store.add_object(obj) sha = obj.id else: sha = obj commit_blobs.append((path, sha, mode)) return self.store[commit_tree(self.store, commit_blobs)] class TreeChangesTest(DiffTestCase): def setUp(self): super(TreeChangesTest, self).setUp() self.detector = RenameDetector(self.store) def assertMergeFails(self, merge_entries, name, mode, sha): t = Tree() t[name] = (mode, sha) self.assertRaises((TypeError, ValueError), merge_entries, "", t, t) def _do_test_merge_entries(self, merge_entries): blob_a1 = make_object(Blob, data=b"a1") blob_a2 = make_object(Blob, data=b"a2") blob_b1 = make_object(Blob, data=b"b1") blob_c2 = make_object(Blob, data=b"c2") tree1 = self.commit_tree([(b"a", blob_a1, 0o100644), (b"b", blob_b1, 0o100755)]) tree2 = self.commit_tree([(b"a", blob_a2, 0o100644), (b"c", blob_c2, 0o100755)]) self.assertEqual([], merge_entries(b"", self.empty_tree, self.empty_tree)) self.assertEqual( [ ((None, None, None), (b"a", 0o100644, blob_a1.id)), ((None, None, None), (b"b", 0o100755, blob_b1.id)), ], merge_entries(b"", self.empty_tree, tree1), ) self.assertEqual( [ ((None, None, None), (b"x/a", 0o100644, blob_a1.id)), ((None, None, None), (b"x/b", 0o100755, blob_b1.id)), ], merge_entries(b"x", self.empty_tree, tree1), ) self.assertEqual( [ ((b"a", 0o100644, blob_a2.id), (None, None, None)), ((b"c", 0o100755, blob_c2.id), (None, None, None)), ], merge_entries(b"", tree2, self.empty_tree), ) self.assertEqual( [ ((b"a", 0o100644, blob_a1.id), (b"a", 0o100644, blob_a2.id)), ((b"b", 0o100755, blob_b1.id), (None, None, None)), ((None, None, None), (b"c", 0o100755, blob_c2.id)), ], merge_entries(b"", tree1, tree2), ) self.assertEqual( [ ((b"a", 0o100644, blob_a2.id), (b"a", 0o100644, blob_a1.id)), ((None, None, None), (b"b", 0o100755, blob_b1.id)), ((b"c", 0o100755, blob_c2.id), (None, None, None)), ], merge_entries(b"", tree2, tree1), ) self.assertMergeFails(merge_entries, 0xDEADBEEF, 0o100644, "1" * 40) self.assertMergeFails(merge_entries, b"a", b"deadbeef", "1" * 40) self.assertMergeFails(merge_entries, b"a", 0o100644, 0xDEADBEEF) test_merge_entries = functest_builder(_do_test_merge_entries, _merge_entries_py) test_merge_entries_extension = ext_functest_builder( _do_test_merge_entries, _merge_entries ) def _do_test_is_tree(self, is_tree): self.assertFalse(is_tree(TreeEntry(None, None, None))) self.assertFalse(is_tree(TreeEntry(b"a", 0o100644, b"a" * 40))) self.assertFalse(is_tree(TreeEntry(b"a", 0o100755, b"a" * 40))) self.assertFalse(is_tree(TreeEntry(b"a", 0o120000, b"a" * 40))) 
self.assertTrue(is_tree(TreeEntry(b"a", 0o040000, b"a" * 40))) self.assertRaises(TypeError, is_tree, TreeEntry(b"a", b"x", b"a" * 40)) self.assertRaises(AttributeError, is_tree, 1234) test_is_tree = functest_builder(_do_test_is_tree, _is_tree_py) test_is_tree_extension = ext_functest_builder(_do_test_is_tree, _is_tree) def assertChangesEqual(self, expected, tree1, tree2, **kwargs): actual = list(tree_changes(self.store, tree1.id, tree2.id, **kwargs)) self.assertEqual(expected, actual) # For brevity, the following tests use tuples instead of TreeEntry objects. def test_tree_changes_empty(self): self.assertChangesEqual([], self.empty_tree, self.empty_tree) def test_tree_changes_no_changes(self): blob = make_object(Blob, data=b"blob") tree = self.commit_tree([(b"a", blob), (b"b/c", blob)]) self.assertChangesEqual([], self.empty_tree, self.empty_tree) self.assertChangesEqual([], tree, tree) self.assertChangesEqual( [ TreeChange(CHANGE_UNCHANGED, (b"a", F, blob.id), (b"a", F, blob.id)), TreeChange( - CHANGE_UNCHANGED, (b"b/c", F, blob.id), (b"b/c", F, blob.id) + CHANGE_UNCHANGED, + (b"b/c", F, blob.id), + (b"b/c", F, blob.id), ), ], tree, tree, want_unchanged=True, ) def test_tree_changes_add_delete(self): blob_a = make_object(Blob, data=b"a") blob_b = make_object(Blob, data=b"b") tree = self.commit_tree([(b"a", blob_a, 0o100644), (b"x/b", blob_b, 0o100755)]) self.assertChangesEqual( [ TreeChange.add((b"a", 0o100644, blob_a.id)), TreeChange.add((b"x/b", 0o100755, blob_b.id)), ], self.empty_tree, tree, ) self.assertChangesEqual( [ TreeChange.delete((b"a", 0o100644, blob_a.id)), TreeChange.delete((b"x/b", 0o100755, blob_b.id)), ], tree, self.empty_tree, ) def test_tree_changes_modify_contents(self): blob_a1 = make_object(Blob, data=b"a1") blob_a2 = make_object(Blob, data=b"a2") tree1 = self.commit_tree([(b"a", blob_a1)]) tree2 = self.commit_tree([(b"a", blob_a2)]) self.assertChangesEqual( [TreeChange(CHANGE_MODIFY, (b"a", F, blob_a1.id), (b"a", F, blob_a2.id))], tree1, tree2, ) def test_tree_changes_modify_mode(self): blob_a = make_object(Blob, data=b"a") tree1 = self.commit_tree([(b"a", blob_a, 0o100644)]) tree2 = self.commit_tree([(b"a", blob_a, 0o100755)]) self.assertChangesEqual( [ TreeChange( CHANGE_MODIFY, (b"a", 0o100644, blob_a.id), (b"a", 0o100755, blob_a.id), ) ], tree1, tree2, ) def test_tree_changes_change_type(self): blob_a1 = make_object(Blob, data=b"a") blob_a2 = make_object(Blob, data=b"/foo/bar") tree1 = self.commit_tree([(b"a", blob_a1, 0o100644)]) tree2 = self.commit_tree([(b"a", blob_a2, 0o120000)]) self.assertChangesEqual( [ TreeChange.delete((b"a", 0o100644, blob_a1.id)), TreeChange.add((b"a", 0o120000, blob_a2.id)), ], tree1, tree2, ) def test_tree_changes_change_type_same(self): blob_a1 = make_object(Blob, data=b"a") blob_a2 = make_object(Blob, data=b"/foo/bar") tree1 = self.commit_tree([(b"a", blob_a1, 0o100644)]) tree2 = self.commit_tree([(b"a", blob_a2, 0o120000)]) self.assertChangesEqual( [ TreeChange( CHANGE_MODIFY, (b"a", 0o100644, blob_a1.id), (b"a", 0o120000, blob_a2.id), ) ], tree1, tree2, change_type_same=True, ) def test_tree_changes_to_tree(self): blob_a = make_object(Blob, data=b"a") blob_x = make_object(Blob, data=b"x") tree1 = self.commit_tree([(b"a", blob_a)]) tree2 = self.commit_tree([(b"a/x", blob_x)]) self.assertChangesEqual( [ TreeChange.delete((b"a", F, blob_a.id)), TreeChange.add((b"a/x", F, blob_x.id)), ], tree1, tree2, ) def test_tree_changes_complex(self): blob_a_1 = make_object(Blob, data=b"a1_1") blob_bx1_1 = make_object(Blob, data=b"bx1_1") 
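# Note (editorial comment): tree1 nests entries under b/x and b/y so the diff below has to recurse into subtrees; tree2 later replaces the b/y directory with a plain file.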
blob_bx2_1 = make_object(Blob, data=b"bx2_1") blob_by1_1 = make_object(Blob, data=b"by1_1") blob_by2_1 = make_object(Blob, data=b"by2_1") tree1 = self.commit_tree( [ (b"a", blob_a_1), (b"b/x/1", blob_bx1_1), (b"b/x/2", blob_bx2_1), (b"b/y/1", blob_by1_1), (b"b/y/2", blob_by2_1), ] ) blob_a_2 = make_object(Blob, data=b"a1_2") blob_bx1_2 = blob_bx1_1 blob_by_2 = make_object(Blob, data=b"by_2") blob_c_2 = make_object(Blob, data=b"c_2") tree2 = self.commit_tree( [ (b"a", blob_a_2), (b"b/x/1", blob_bx1_2), (b"b/y", blob_by_2), (b"c", blob_c_2), ] ) self.assertChangesEqual( [ TreeChange( - CHANGE_MODIFY, (b"a", F, blob_a_1.id), (b"a", F, blob_a_2.id) + CHANGE_MODIFY, + (b"a", F, blob_a_1.id), + (b"a", F, blob_a_2.id), ), TreeChange.delete((b"b/x/2", F, blob_bx2_1.id)), TreeChange.add((b"b/y", F, blob_by_2.id)), TreeChange.delete((b"b/y/1", F, blob_by1_1.id)), TreeChange.delete((b"b/y/2", F, blob_by2_1.id)), TreeChange.add((b"c", F, blob_c_2.id)), ], tree1, tree2, ) def test_tree_changes_name_order(self): blob = make_object(Blob, data=b"a") tree1 = self.commit_tree([(b"a", blob), (b"a.", blob), (b"a..", blob)]) # Tree order is the reverse of this, so if we used tree order, 'a..' # would not be merged. tree2 = self.commit_tree([(b"a/x", blob), (b"a./x", blob), (b"a..", blob)]) self.assertChangesEqual( [ TreeChange.delete((b"a", F, blob.id)), TreeChange.add((b"a/x", F, blob.id)), TreeChange.delete((b"a.", F, blob.id)), TreeChange.add((b"a./x", F, blob.id)), ], tree1, tree2, ) def test_tree_changes_prune(self): blob_a1 = make_object(Blob, data=b"a1") blob_a2 = make_object(Blob, data=b"a2") blob_x = make_object(Blob, data=b"x") tree1 = self.commit_tree([(b"a", blob_a1), (b"b/x", blob_x)]) tree2 = self.commit_tree([(b"a", blob_a2), (b"b/x", blob_x)]) # Remove identical items so lookups will fail unless we prune. 
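# (Editorial note: tree_changes skips subtrees whose SHAs match on both sides, so the b/ objects deleted below should never be looked up in the store.)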
subtree = self.store[tree1[b"b"][1]] for entry in subtree.items(): del self.store[entry.sha] del self.store[subtree.id] self.assertChangesEqual( [TreeChange(CHANGE_MODIFY, (b"a", F, blob_a1.id), (b"a", F, blob_a2.id))], tree1, tree2, ) def test_tree_changes_rename_detector(self): blob_a1 = make_object(Blob, data=b"a\nb\nc\nd\n") blob_a2 = make_object(Blob, data=b"a\nb\nc\ne\n") blob_b = make_object(Blob, data=b"b") tree1 = self.commit_tree([(b"a", blob_a1), (b"b", blob_b)]) tree2 = self.commit_tree([(b"c", blob_a2), (b"b", blob_b)]) detector = RenameDetector(self.store) self.assertChangesEqual( [ TreeChange.delete((b"a", F, blob_a1.id)), TreeChange.add((b"c", F, blob_a2.id)), ], tree1, tree2, ) self.assertChangesEqual( [ TreeChange.delete((b"a", F, blob_a1.id)), TreeChange( - CHANGE_UNCHANGED, (b"b", F, blob_b.id), (b"b", F, blob_b.id) + CHANGE_UNCHANGED, + (b"b", F, blob_b.id), + (b"b", F, blob_b.id), ), TreeChange.add((b"c", F, blob_a2.id)), ], tree1, tree2, want_unchanged=True, ) self.assertChangesEqual( [TreeChange(CHANGE_RENAME, (b"a", F, blob_a1.id), (b"c", F, blob_a2.id))], tree1, tree2, rename_detector=detector, ) self.assertChangesEqual( [ TreeChange(CHANGE_RENAME, (b"a", F, blob_a1.id), (b"c", F, blob_a2.id)), TreeChange( - CHANGE_UNCHANGED, (b"b", F, blob_b.id), (b"b", F, blob_b.id) + CHANGE_UNCHANGED, + (b"b", F, blob_b.id), + (b"b", F, blob_b.id), ), ], tree1, tree2, rename_detector=detector, want_unchanged=True, ) def assertChangesForMergeEqual(self, expected, parent_trees, merge_tree, **kwargs): parent_tree_ids = [t.id for t in parent_trees] actual = list( tree_changes_for_merge(self.store, parent_tree_ids, merge_tree.id, **kwargs) ) self.assertEqual(expected, actual) parent_tree_ids.reverse() expected = [list(reversed(cs)) for cs in expected] actual = list( tree_changes_for_merge(self.store, parent_tree_ids, merge_tree.id, **kwargs) ) self.assertEqual(expected, actual) def test_tree_changes_for_merge_add_no_conflict(self): blob = make_object(Blob, data=b"blob") parent1 = self.commit_tree([]) parent2 = merge = self.commit_tree([(b"a", blob)]) self.assertChangesForMergeEqual([], [parent1, parent2], merge) self.assertChangesForMergeEqual([], [parent2, parent2], merge) def test_tree_changes_for_merge_add_modify_conflict(self): blob1 = make_object(Blob, data=b"1") blob2 = make_object(Blob, data=b"2") parent1 = self.commit_tree([]) parent2 = self.commit_tree([(b"a", blob1)]) merge = self.commit_tree([(b"a", blob2)]) self.assertChangesForMergeEqual( [ [ TreeChange.add((b"a", F, blob2.id)), TreeChange(CHANGE_MODIFY, (b"a", F, blob1.id), (b"a", F, blob2.id)), ] ], [parent1, parent2], merge, ) def test_tree_changes_for_merge_modify_modify_conflict(self): blob1 = make_object(Blob, data=b"1") blob2 = make_object(Blob, data=b"2") blob3 = make_object(Blob, data=b"3") parent1 = self.commit_tree([(b"a", blob1)]) parent2 = self.commit_tree([(b"a", blob2)]) merge = self.commit_tree([(b"a", blob3)]) self.assertChangesForMergeEqual( [ [ TreeChange(CHANGE_MODIFY, (b"a", F, blob1.id), (b"a", F, blob3.id)), TreeChange(CHANGE_MODIFY, (b"a", F, blob2.id), (b"a", F, blob3.id)), ] ], [parent1, parent2], merge, ) def test_tree_changes_for_merge_modify_no_conflict(self): blob1 = make_object(Blob, data=b"1") blob2 = make_object(Blob, data=b"2") parent1 = self.commit_tree([(b"a", blob1)]) parent2 = merge = self.commit_tree([(b"a", blob2)]) self.assertChangesForMergeEqual([], [parent1, parent2], merge) def test_tree_changes_for_merge_delete_delete_conflict(self): blob1 = make_object(Blob, data=b"1") blob2 
= make_object(Blob, data=b"2") parent1 = self.commit_tree([(b"a", blob1)]) parent2 = self.commit_tree([(b"a", blob2)]) merge = self.commit_tree([]) self.assertChangesForMergeEqual( [ [ TreeChange.delete((b"a", F, blob1.id)), TreeChange.delete((b"a", F, blob2.id)), ] ], [parent1, parent2], merge, ) def test_tree_changes_for_merge_delete_no_conflict(self): blob = make_object(Blob, data=b"blob") has = self.commit_tree([(b"a", blob)]) doesnt_have = self.commit_tree([]) self.assertChangesForMergeEqual([], [has, has], doesnt_have) self.assertChangesForMergeEqual([], [has, doesnt_have], doesnt_have) def test_tree_changes_for_merge_octopus_no_conflict(self): r = list(range(5)) blobs = [make_object(Blob, data=bytes(i)) for i in r] parents = [self.commit_tree([(b"a", blobs[i])]) for i in r] for i in r: # Take the SHA from each of the parents. self.assertChangesForMergeEqual([], parents, parents[i]) def test_tree_changes_for_merge_octopus_modify_conflict(self): # Because the octopus merge strategy is limited, I doubt it's possible # to create this with the git command line. But the output is well- # defined, so test it anyway. r = list(range(5)) parent_blobs = [make_object(Blob, data=bytes(i)) for i in r] merge_blob = make_object(Blob, data=b"merge") parents = [self.commit_tree([(b"a", parent_blobs[i])]) for i in r] merge = self.commit_tree([(b"a", merge_blob)]) expected = [ [ TreeChange( CHANGE_MODIFY, (b"a", F, parent_blobs[i].id), (b"a", F, merge_blob.id), ) for i in r ] ] self.assertChangesForMergeEqual(expected, parents, merge) def test_tree_changes_for_merge_octopus_delete(self): blob1 = make_object(Blob, data=b"1") blob2 = make_object(Blob, data=b"3") parent1 = self.commit_tree([(b"a", blob1)]) parent2 = self.commit_tree([(b"a", blob2)]) parent3 = merge = self.commit_tree([]) self.assertChangesForMergeEqual([], [parent1, parent1, parent1], merge) self.assertChangesForMergeEqual([], [parent1, parent1, parent3], merge) self.assertChangesForMergeEqual([], [parent1, parent3, parent3], merge) self.assertChangesForMergeEqual( [ [ TreeChange.delete((b"a", F, blob1.id)), TreeChange.delete((b"a", F, blob2.id)), None, ] ], [parent1, parent2, parent3], merge, ) def test_tree_changes_for_merge_add_add_same_conflict(self): blob = make_object(Blob, data=b"a\nb\nc\nd\n") parent1 = self.commit_tree([(b"a", blob)]) parent2 = self.commit_tree([]) merge = self.commit_tree([(b"b", blob)]) add = TreeChange.add((b"b", F, blob.id)) self.assertChangesForMergeEqual([[add, add]], [parent1, parent2], merge) def test_tree_changes_for_merge_add_exact_rename_conflict(self): blob = make_object(Blob, data=b"a\nb\nc\nd\n") parent1 = self.commit_tree([(b"a", blob)]) parent2 = self.commit_tree([]) merge = self.commit_tree([(b"b", blob)]) self.assertChangesForMergeEqual( [ [ TreeChange(CHANGE_RENAME, (b"a", F, blob.id), (b"b", F, blob.id)), TreeChange.add((b"b", F, blob.id)), ] ], [parent1, parent2], merge, rename_detector=self.detector, ) def test_tree_changes_for_merge_add_content_rename_conflict(self): blob1 = make_object(Blob, data=b"a\nb\nc\nd\n") blob2 = make_object(Blob, data=b"a\nb\nc\ne\n") parent1 = self.commit_tree([(b"a", blob1)]) parent2 = self.commit_tree([]) merge = self.commit_tree([(b"b", blob2)]) self.assertChangesForMergeEqual( [ [ TreeChange(CHANGE_RENAME, (b"a", F, blob1.id), (b"b", F, blob2.id)), TreeChange.add((b"b", F, blob2.id)), ] ], [parent1, parent2], merge, rename_detector=self.detector, ) def test_tree_changes_for_merge_modify_rename_conflict(self): blob1 = make_object(Blob, data=b"a\nb\nc\nd\n") 
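# Note (editorial comment): parent1 holds this content at "a" while parent2 already has it at "b"; the merge modifies it in place at "b", so one parent should report a rename and the other a plain modify.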
blob2 = make_object(Blob, data=b"a\nb\nc\ne\n") parent1 = self.commit_tree([(b"a", blob1)]) parent2 = self.commit_tree([(b"b", blob1)]) merge = self.commit_tree([(b"b", blob2)]) self.assertChangesForMergeEqual( [ [ TreeChange(CHANGE_RENAME, (b"a", F, blob1.id), (b"b", F, blob2.id)), TreeChange(CHANGE_MODIFY, (b"b", F, blob1.id), (b"b", F, blob2.id)), ] ], [parent1, parent2], merge, rename_detector=self.detector, ) class RenameDetectionTest(DiffTestCase): def _do_test_count_blocks(self, count_blocks): blob = make_object(Blob, data=b"a\nb\na\n") self.assertBlockCountEqual({b"a\n": 4, b"b\n": 2}, count_blocks(blob)) test_count_blocks = functest_builder(_do_test_count_blocks, _count_blocks_py) test_count_blocks_extension = ext_functest_builder( _do_test_count_blocks, _count_blocks ) def _do_test_count_blocks_no_newline(self, count_blocks): blob = make_object(Blob, data=b"a\na") self.assertBlockCountEqual({b"a\n": 2, b"a": 1}, _count_blocks(blob)) test_count_blocks_no_newline = functest_builder( _do_test_count_blocks_no_newline, _count_blocks_py ) test_count_blocks_no_newline_extension = ext_functest_builder( _do_test_count_blocks_no_newline, _count_blocks ) def assertBlockCountEqual(self, expected, got): self.assertEqual( {(hash(l) & 0xFFFFFFFF): c for (l, c) in expected.items()}, {(h & 0xFFFFFFFF): c for (h, c) in got.items()}, ) def _do_test_count_blocks_chunks(self, count_blocks): blob = ShaFile.from_raw_chunks(Blob.type_num, [b"a\nb", b"\na\n"]) self.assertBlockCountEqual({b"a\n": 4, b"b\n": 2}, _count_blocks(blob)) test_count_blocks_chunks = functest_builder( _do_test_count_blocks_chunks, _count_blocks_py ) test_count_blocks_chunks_extension = ext_functest_builder( _do_test_count_blocks_chunks, _count_blocks ) def _do_test_count_blocks_long_lines(self, count_blocks): a = b"a" * 64 data = a + b"xxx\ny\n" + a + b"zzz\n" blob = make_object(Blob, data=data) self.assertBlockCountEqual( - {b"a" * 64: 128, b"xxx\n": 4, b"y\n": 2, b"zzz\n": 4}, _count_blocks(blob) + {b"a" * 64: 128, b"xxx\n": 4, b"y\n": 2, b"zzz\n": 4}, + _count_blocks(blob), ) test_count_blocks_long_lines = functest_builder( _do_test_count_blocks_long_lines, _count_blocks_py ) test_count_blocks_long_lines_extension = ext_functest_builder( _do_test_count_blocks_long_lines, _count_blocks ) def assertSimilar(self, expected_score, blob1, blob2): self.assertEqual(expected_score, _similarity_score(blob1, blob2)) self.assertEqual(expected_score, _similarity_score(blob2, blob1)) def test_similarity_score(self): blob0 = make_object(Blob, data=b"") blob1 = make_object(Blob, data=b"ab\ncd\ncd\n") blob2 = make_object(Blob, data=b"ab\n") blob3 = make_object(Blob, data=b"cd\n") blob4 = make_object(Blob, data=b"cd\ncd\n") self.assertSimilar(100, blob0, blob0) self.assertSimilar(0, blob0, blob1) self.assertSimilar(33, blob1, blob2) self.assertSimilar(33, blob1, blob3) self.assertSimilar(66, blob1, blob4) self.assertSimilar(0, blob2, blob3) self.assertSimilar(50, blob3, blob4) def test_similarity_score_cache(self): blob1 = make_object(Blob, data=b"ab\ncd\n") blob2 = make_object(Blob, data=b"ab\n") block_cache = {} self.assertEqual(50, _similarity_score(blob1, blob2, block_cache=block_cache)) self.assertEqual(set([blob1.id, blob2.id]), set(block_cache)) def fail_chunks(): self.fail("Unexpected call to as_raw_chunks()") blob1.as_raw_chunks = blob2.as_raw_chunks = fail_chunks blob1.raw_length = lambda: 6 blob2.raw_length = lambda: 3 self.assertEqual(50, _similarity_score(blob1, blob2, block_cache=block_cache)) def test_tree_entry_sort(self): sha = 
"abcd" * 10 expected_entries = [ TreeChange.add(TreeEntry(b"aaa", F, sha)), TreeChange( - CHANGE_COPY, TreeEntry(b"bbb", F, sha), TreeEntry(b"aab", F, sha) + CHANGE_COPY, + TreeEntry(b"bbb", F, sha), + TreeEntry(b"aab", F, sha), ), TreeChange( CHANGE_MODIFY, TreeEntry(b"bbb", F, sha), TreeEntry(b"bbb", F, b"dabc" * 10), ), TreeChange( - CHANGE_RENAME, TreeEntry(b"bbc", F, sha), TreeEntry(b"ddd", F, sha) + CHANGE_RENAME, + TreeEntry(b"bbc", F, sha), + TreeEntry(b"ddd", F, sha), ), TreeChange.delete(TreeEntry(b"ccc", F, sha)), ] for perm in permutations(expected_entries): self.assertEqual(expected_entries, sorted(perm, key=_tree_change_key)) def detect_renames(self, tree1, tree2, want_unchanged=False, **kwargs): detector = RenameDetector(self.store, **kwargs) return detector.changes_with_renames( tree1.id, tree2.id, want_unchanged=want_unchanged ) def test_no_renames(self): blob1 = make_object(Blob, data=b"a\nb\nc\nd\n") blob2 = make_object(Blob, data=b"a\nb\ne\nf\n") blob3 = make_object(Blob, data=b"a\nb\ng\nh\n") tree1 = self.commit_tree([(b"a", blob1), (b"b", blob2)]) tree2 = self.commit_tree([(b"a", blob1), (b"b", blob3)]) self.assertEqual( [TreeChange(CHANGE_MODIFY, (b"b", F, blob2.id), (b"b", F, blob3.id))], self.detect_renames(tree1, tree2), ) def test_exact_rename_one_to_one(self): blob1 = make_object(Blob, data=b"1") blob2 = make_object(Blob, data=b"2") tree1 = self.commit_tree([(b"a", blob1), (b"b", blob2)]) tree2 = self.commit_tree([(b"c", blob1), (b"d", blob2)]) self.assertEqual( [ TreeChange(CHANGE_RENAME, (b"a", F, blob1.id), (b"c", F, blob1.id)), TreeChange(CHANGE_RENAME, (b"b", F, blob2.id), (b"d", F, blob2.id)), ], self.detect_renames(tree1, tree2), ) def test_exact_rename_split_different_type(self): blob = make_object(Blob, data=b"/foo") tree1 = self.commit_tree([(b"a", blob, 0o100644)]) tree2 = self.commit_tree([(b"a", blob, 0o120000)]) self.assertEqual( [ TreeChange.add((b"a", 0o120000, blob.id)), TreeChange.delete((b"a", 0o100644, blob.id)), ], self.detect_renames(tree1, tree2), ) def test_exact_rename_and_different_type(self): blob1 = make_object(Blob, data=b"1") blob2 = make_object(Blob, data=b"2") tree1 = self.commit_tree([(b"a", blob1)]) tree2 = self.commit_tree([(b"a", blob2, 0o120000), (b"b", blob1)]) self.assertEqual( [ TreeChange.add((b"a", 0o120000, blob2.id)), TreeChange(CHANGE_RENAME, (b"a", F, blob1.id), (b"b", F, blob1.id)), ], self.detect_renames(tree1, tree2), ) def test_exact_rename_one_to_many(self): blob = make_object(Blob, data=b"1") tree1 = self.commit_tree([(b"a", blob)]) tree2 = self.commit_tree([(b"b", blob), (b"c", blob)]) self.assertEqual( [ TreeChange(CHANGE_RENAME, (b"a", F, blob.id), (b"b", F, blob.id)), TreeChange(CHANGE_COPY, (b"a", F, blob.id), (b"c", F, blob.id)), ], self.detect_renames(tree1, tree2), ) def test_exact_rename_many_to_one(self): blob = make_object(Blob, data=b"1") tree1 = self.commit_tree([(b"a", blob), (b"b", blob)]) tree2 = self.commit_tree([(b"c", blob)]) self.assertEqual( [ TreeChange(CHANGE_RENAME, (b"a", F, blob.id), (b"c", F, blob.id)), TreeChange.delete((b"b", F, blob.id)), ], self.detect_renames(tree1, tree2), ) def test_exact_rename_many_to_many(self): blob = make_object(Blob, data=b"1") tree1 = self.commit_tree([(b"a", blob), (b"b", blob)]) tree2 = self.commit_tree([(b"c", blob), (b"d", blob), (b"e", blob)]) self.assertEqual( [ TreeChange(CHANGE_RENAME, (b"a", F, blob.id), (b"c", F, blob.id)), TreeChange(CHANGE_COPY, (b"a", F, blob.id), (b"e", F, blob.id)), TreeChange(CHANGE_RENAME, (b"b", F, blob.id), (b"d", F, 
blob.id)), ], self.detect_renames(tree1, tree2), ) def test_exact_copy_modify(self): blob1 = make_object(Blob, data=b"a\nb\nc\nd\n") blob2 = make_object(Blob, data=b"a\nb\nc\ne\n") tree1 = self.commit_tree([(b"a", blob1)]) tree2 = self.commit_tree([(b"a", blob2), (b"b", blob1)]) self.assertEqual( [ TreeChange(CHANGE_MODIFY, (b"a", F, blob1.id), (b"a", F, blob2.id)), TreeChange(CHANGE_COPY, (b"a", F, blob1.id), (b"b", F, blob1.id)), ], self.detect_renames(tree1, tree2), ) def test_exact_copy_change_mode(self): blob = make_object(Blob, data=b"a\nb\nc\nd\n") tree1 = self.commit_tree([(b"a", blob)]) tree2 = self.commit_tree([(b"a", blob, 0o100755), (b"b", blob)]) self.assertEqual( [ TreeChange( - CHANGE_MODIFY, (b"a", F, blob.id), (b"a", 0o100755, blob.id) + CHANGE_MODIFY, + (b"a", F, blob.id), + (b"a", 0o100755, blob.id), ), TreeChange(CHANGE_COPY, (b"a", F, blob.id), (b"b", F, blob.id)), ], self.detect_renames(tree1, tree2), ) def test_rename_threshold(self): blob1 = make_object(Blob, data=b"a\nb\nc\n") blob2 = make_object(Blob, data=b"a\nb\nd\n") tree1 = self.commit_tree([(b"a", blob1)]) tree2 = self.commit_tree([(b"b", blob2)]) self.assertEqual( [TreeChange(CHANGE_RENAME, (b"a", F, blob1.id), (b"b", F, blob2.id))], self.detect_renames(tree1, tree2, rename_threshold=50), ) self.assertEqual( [ TreeChange.delete((b"a", F, blob1.id)), TreeChange.add((b"b", F, blob2.id)), ], self.detect_renames(tree1, tree2, rename_threshold=75), ) def test_content_rename_max_files(self): blob1 = make_object(Blob, data=b"a\nb\nc\nd") blob4 = make_object(Blob, data=b"a\nb\nc\ne\n") blob2 = make_object(Blob, data=b"e\nf\ng\nh\n") blob3 = make_object(Blob, data=b"e\nf\ng\ni\n") tree1 = self.commit_tree([(b"a", blob1), (b"b", blob2)]) tree2 = self.commit_tree([(b"c", blob3), (b"d", blob4)]) self.assertEqual( [ TreeChange(CHANGE_RENAME, (b"a", F, blob1.id), (b"d", F, blob4.id)), TreeChange(CHANGE_RENAME, (b"b", F, blob2.id), (b"c", F, blob3.id)), ], self.detect_renames(tree1, tree2), ) self.assertEqual( [ TreeChange.delete((b"a", F, blob1.id)), TreeChange.delete((b"b", F, blob2.id)), TreeChange.add((b"c", F, blob3.id)), TreeChange.add((b"d", F, blob4.id)), ], self.detect_renames(tree1, tree2, max_files=1), ) def test_content_rename_one_to_one(self): b11 = make_object(Blob, data=b"a\nb\nc\nd\n") b12 = make_object(Blob, data=b"a\nb\nc\ne\n") b21 = make_object(Blob, data=b"e\nf\ng\n\nh") b22 = make_object(Blob, data=b"e\nf\ng\n\ni") tree1 = self.commit_tree([(b"a", b11), (b"b", b21)]) tree2 = self.commit_tree([(b"c", b12), (b"d", b22)]) self.assertEqual( [ TreeChange(CHANGE_RENAME, (b"a", F, b11.id), (b"c", F, b12.id)), TreeChange(CHANGE_RENAME, (b"b", F, b21.id), (b"d", F, b22.id)), ], self.detect_renames(tree1, tree2), ) def test_content_rename_one_to_one_ordering(self): blob1 = make_object(Blob, data=b"a\nb\nc\nd\ne\nf\n") blob2 = make_object(Blob, data=b"a\nb\nc\nd\ng\nh\n") # 6/10 match to blob1, 8/10 match to blob2 blob3 = make_object(Blob, data=b"a\nb\nc\nd\ng\ni\n") tree1 = self.commit_tree([(b"a", blob1), (b"b", blob2)]) tree2 = self.commit_tree([(b"c", blob3)]) self.assertEqual( [ TreeChange.delete((b"a", F, blob1.id)), TreeChange(CHANGE_RENAME, (b"b", F, blob2.id), (b"c", F, blob3.id)), ], self.detect_renames(tree1, tree2), ) tree3 = self.commit_tree([(b"a", blob2), (b"b", blob1)]) tree4 = self.commit_tree([(b"c", blob3)]) self.assertEqual( [ TreeChange(CHANGE_RENAME, (b"a", F, blob2.id), (b"c", F, blob3.id)), TreeChange.delete((b"b", F, blob1.id)), ], self.detect_renames(tree3, tree4), ) def 
test_content_rename_one_to_many(self): blob1 = make_object(Blob, data=b"aa\nb\nc\nd\ne\n") blob2 = make_object(Blob, data=b"ab\nb\nc\nd\ne\n") # 8/11 match blob3 = make_object(Blob, data=b"aa\nb\nc\nd\nf\n") # 9/11 match tree1 = self.commit_tree([(b"a", blob1)]) tree2 = self.commit_tree([(b"b", blob2), (b"c", blob3)]) self.assertEqual( [ TreeChange(CHANGE_COPY, (b"a", F, blob1.id), (b"b", F, blob2.id)), TreeChange(CHANGE_RENAME, (b"a", F, blob1.id), (b"c", F, blob3.id)), ], self.detect_renames(tree1, tree2), ) def test_content_rename_many_to_one(self): blob1 = make_object(Blob, data=b"a\nb\nc\nd\n") blob2 = make_object(Blob, data=b"a\nb\nc\ne\n") blob3 = make_object(Blob, data=b"a\nb\nc\nf\n") tree1 = self.commit_tree([(b"a", blob1), (b"b", blob2)]) tree2 = self.commit_tree([(b"c", blob3)]) self.assertEqual( [ TreeChange(CHANGE_RENAME, (b"a", F, blob1.id), (b"c", F, blob3.id)), TreeChange.delete((b"b", F, blob2.id)), ], self.detect_renames(tree1, tree2), ) def test_content_rename_many_to_many(self): blob1 = make_object(Blob, data=b"a\nb\nc\nd\n") blob2 = make_object(Blob, data=b"a\nb\nc\ne\n") blob3 = make_object(Blob, data=b"a\nb\nc\nf\n") blob4 = make_object(Blob, data=b"a\nb\nc\ng\n") tree1 = self.commit_tree([(b"a", blob1), (b"b", blob2)]) tree2 = self.commit_tree([(b"c", blob3), (b"d", blob4)]) # TODO(dborowitz): Distribute renames rather than greedily choosing # copies. self.assertEqual( [ TreeChange(CHANGE_RENAME, (b"a", F, blob1.id), (b"c", F, blob3.id)), TreeChange(CHANGE_COPY, (b"a", F, blob1.id), (b"d", F, blob4.id)), TreeChange.delete((b"b", F, blob2.id)), ], self.detect_renames(tree1, tree2), ) def test_content_rename_with_more_deletions(self): blob1 = make_object(Blob, data=b"") tree1 = self.commit_tree( [(b"a", blob1), (b"b", blob1), (b"c", blob1), (b"d", blob1)] ) tree2 = self.commit_tree([(b"e", blob1), (b"f", blob1), (b"g", blob1)]) self.maxDiff = None self.assertEqual( [ TreeChange(CHANGE_RENAME, (b"a", F, blob1.id), (b"e", F, blob1.id)), TreeChange(CHANGE_RENAME, (b"b", F, blob1.id), (b"f", F, blob1.id)), TreeChange(CHANGE_RENAME, (b"c", F, blob1.id), (b"g", F, blob1.id)), TreeChange.delete((b"d", F, blob1.id)), ], self.detect_renames(tree1, tree2), ) def test_content_rename_gitlink(self): blob1 = make_object(Blob, data=b"blob1") blob2 = make_object(Blob, data=b"blob2") link1 = b"1" * 40 link2 = b"2" * 40 tree1 = self.commit_tree([(b"a", blob1), (b"b", link1, 0o160000)]) tree2 = self.commit_tree([(b"c", blob2), (b"d", link2, 0o160000)]) self.assertEqual( [ TreeChange.delete((b"a", 0o100644, blob1.id)), TreeChange.delete((b"b", 0o160000, link1)), TreeChange.add((b"c", 0o100644, blob2.id)), TreeChange.add((b"d", 0o160000, link2)), ], self.detect_renames(tree1, tree2), ) def test_exact_rename_swap(self): blob1 = make_object(Blob, data=b"1") blob2 = make_object(Blob, data=b"2") tree1 = self.commit_tree([(b"a", blob1), (b"b", blob2)]) tree2 = self.commit_tree([(b"a", blob2), (b"b", blob1)]) self.assertEqual( [ TreeChange(CHANGE_MODIFY, (b"a", F, blob1.id), (b"a", F, blob2.id)), TreeChange(CHANGE_MODIFY, (b"b", F, blob2.id), (b"b", F, blob1.id)), ], self.detect_renames(tree1, tree2), ) self.assertEqual( [ TreeChange(CHANGE_RENAME, (b"a", F, blob1.id), (b"b", F, blob1.id)), TreeChange(CHANGE_RENAME, (b"b", F, blob2.id), (b"a", F, blob2.id)), ], self.detect_renames(tree1, tree2, rewrite_threshold=50), ) def test_content_rename_swap(self): blob1 = make_object(Blob, data=b"a\nb\nc\nd\n") blob2 = make_object(Blob, data=b"e\nf\ng\nh\n") blob3 = make_object(Blob, 
data=b"a\nb\nc\ne\n") blob4 = make_object(Blob, data=b"e\nf\ng\ni\n") tree1 = self.commit_tree([(b"a", blob1), (b"b", blob2)]) tree2 = self.commit_tree([(b"a", blob4), (b"b", blob3)]) self.assertEqual( [ TreeChange(CHANGE_RENAME, (b"a", F, blob1.id), (b"b", F, blob3.id)), TreeChange(CHANGE_RENAME, (b"b", F, blob2.id), (b"a", F, blob4.id)), ], self.detect_renames(tree1, tree2, rewrite_threshold=60), ) def test_rewrite_threshold(self): blob1 = make_object(Blob, data=b"a\nb\nc\nd\n") blob2 = make_object(Blob, data=b"a\nb\nc\ne\n") blob3 = make_object(Blob, data=b"a\nb\nf\ng\n") tree1 = self.commit_tree([(b"a", blob1)]) tree2 = self.commit_tree([(b"a", blob3), (b"b", blob2)]) no_renames = [ TreeChange(CHANGE_MODIFY, (b"a", F, blob1.id), (b"a", F, blob3.id)), TreeChange(CHANGE_COPY, (b"a", F, blob1.id), (b"b", F, blob2.id)), ] self.assertEqual(no_renames, self.detect_renames(tree1, tree2)) self.assertEqual( no_renames, self.detect_renames(tree1, tree2, rewrite_threshold=40) ) self.assertEqual( [ TreeChange.add((b"a", F, blob3.id)), TreeChange(CHANGE_RENAME, (b"a", F, blob1.id), (b"b", F, blob2.id)), ], self.detect_renames(tree1, tree2, rewrite_threshold=80), ) def test_find_copies_harder_exact(self): blob = make_object(Blob, data=b"blob") tree1 = self.commit_tree([(b"a", blob)]) tree2 = self.commit_tree([(b"a", blob), (b"b", blob)]) self.assertEqual( - [TreeChange.add((b"b", F, blob.id))], self.detect_renames(tree1, tree2) + [TreeChange.add((b"b", F, blob.id))], + self.detect_renames(tree1, tree2), ) self.assertEqual( [TreeChange(CHANGE_COPY, (b"a", F, blob.id), (b"b", F, blob.id))], self.detect_renames(tree1, tree2, find_copies_harder=True), ) def test_find_copies_harder_content(self): blob1 = make_object(Blob, data=b"a\nb\nc\nd\n") blob2 = make_object(Blob, data=b"a\nb\nc\ne\n") tree1 = self.commit_tree([(b"a", blob1)]) tree2 = self.commit_tree([(b"a", blob1), (b"b", blob2)]) self.assertEqual( - [TreeChange.add((b"b", F, blob2.id))], self.detect_renames(tree1, tree2) + [TreeChange.add((b"b", F, blob2.id))], + self.detect_renames(tree1, tree2), ) self.assertEqual( [TreeChange(CHANGE_COPY, (b"a", F, blob1.id), (b"b", F, blob2.id))], self.detect_renames(tree1, tree2, find_copies_harder=True), ) def test_find_copies_harder_with_rewrites(self): blob_a1 = make_object(Blob, data=b"a\nb\nc\nd\n") blob_a2 = make_object(Blob, data=b"f\ng\nh\ni\n") blob_b2 = make_object(Blob, data=b"a\nb\nc\ne\n") tree1 = self.commit_tree([(b"a", blob_a1)]) tree2 = self.commit_tree([(b"a", blob_a2), (b"b", blob_b2)]) self.assertEqual( [ TreeChange(CHANGE_MODIFY, (b"a", F, blob_a1.id), (b"a", F, blob_a2.id)), TreeChange(CHANGE_COPY, (b"a", F, blob_a1.id), (b"b", F, blob_b2.id)), ], self.detect_renames(tree1, tree2, find_copies_harder=True), ) self.assertEqual( [ TreeChange.add((b"a", F, blob_a2.id)), TreeChange(CHANGE_RENAME, (b"a", F, blob_a1.id), (b"b", F, blob_b2.id)), ], self.detect_renames( tree1, tree2, rewrite_threshold=50, find_copies_harder=True ), ) def test_reuse_detector(self): blob = make_object(Blob, data=b"blob") tree1 = self.commit_tree([(b"a", blob)]) tree2 = self.commit_tree([(b"b", blob)]) detector = RenameDetector(self.store) changes = [TreeChange(CHANGE_RENAME, (b"a", F, blob.id), (b"b", F, blob.id))] self.assertEqual(changes, detector.changes_with_renames(tree1.id, tree2.id)) self.assertEqual(changes, detector.changes_with_renames(tree1.id, tree2.id)) def test_want_unchanged(self): blob_a1 = make_object(Blob, data=b"a\nb\nc\nd\n") blob_b = make_object(Blob, data=b"b") blob_c2 = make_object(Blob, 
data=b"a\nb\nc\ne\n") tree1 = self.commit_tree([(b"a", blob_a1), (b"b", blob_b)]) tree2 = self.commit_tree([(b"c", blob_c2), (b"b", blob_b)]) self.assertEqual( [TreeChange(CHANGE_RENAME, (b"a", F, blob_a1.id), (b"c", F, blob_c2.id))], self.detect_renames(tree1, tree2), ) self.assertEqual( [ TreeChange(CHANGE_RENAME, (b"a", F, blob_a1.id), (b"c", F, blob_c2.id)), TreeChange( - CHANGE_UNCHANGED, (b"b", F, blob_b.id), (b"b", F, blob_b.id) + CHANGE_UNCHANGED, + (b"b", F, blob_b.id), + (b"b", F, blob_b.id), ), ], self.detect_renames(tree1, tree2, want_unchanged=True), ) diff --git a/dulwich/tests/test_fastexport.py b/dulwich/tests/test_fastexport.py index 0d3d4653..09d55c26 100644 --- a/dulwich/tests/test_fastexport.py +++ b/dulwich/tests/test_fastexport.py @@ -1,305 +1,317 @@ # test_fastexport.py -- Fast export/import functionality # Copyright (C) 2010 Jelmer Vernooij # # Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU # General Public License as public by the Free Software Foundation; version 2.0 # or (at your option) any later version. You can redistribute it and/or # modify it under the terms of either of these two licenses. # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # You should have received a copy of the licenses; if not, see # for a copy of the GNU General Public License # and for a copy of the Apache # License, Version 2.0. # from io import BytesIO import stat from dulwich.object_store import ( MemoryObjectStore, ) from dulwich.objects import ( Blob, Commit, Tree, ZERO_SHA, ) from dulwich.repo import ( MemoryRepo, ) from dulwich.tests import ( SkipTest, TestCase, ) from dulwich.tests.utils import ( build_commit_graph, ) class GitFastExporterTests(TestCase): """Tests for the GitFastExporter tests.""" def setUp(self): super(GitFastExporterTests, self).setUp() self.store = MemoryObjectStore() self.stream = BytesIO() try: from dulwich.fastexport import GitFastExporter except ImportError: raise SkipTest("python-fastimport not available") self.fastexporter = GitFastExporter(self.stream, self.store) def test_emit_blob(self): b = Blob() b.data = b"fooBAR" self.fastexporter.emit_blob(b) self.assertEqual(b"blob\nmark :1\ndata 6\nfooBAR\n", self.stream.getvalue()) def test_emit_commit(self): b = Blob() b.data = b"FOO" t = Tree() t.add(b"foo", stat.S_IFREG | 0o644, b.id) c = Commit() c.committer = c.author = b"Jelmer " c.author_time = c.commit_time = 1271345553 c.author_timezone = c.commit_timezone = 0 c.message = b"msg" c.tree = t.id self.store.add_objects([(b, None), (t, None), (c, None)]) self.fastexporter.emit_commit(c, b"refs/heads/master") self.assertEqual( b"""blob mark :1 data 3 FOO commit refs/heads/master mark :2 author Jelmer 1271345553 +0000 committer Jelmer 1271345553 +0000 data 3 msg M 644 :1 foo """, self.stream.getvalue(), ) class GitImportProcessorTests(TestCase): """Tests for the GitImportProcessor tests.""" def setUp(self): super(GitImportProcessorTests, self).setUp() self.repo = MemoryRepo() try: from dulwich.fastexport import GitImportProcessor except ImportError: raise SkipTest("python-fastimport not available") self.processor = GitImportProcessor(self.repo) def test_reset_handler(self): from fastimport import commands [c1] = build_commit_graph(self.repo.object_store, [[1]]) 
diff --git a/dulwich/tests/test_fastexport.py b/dulwich/tests/test_fastexport.py index 0d3d4653..09d55c26 100644 --- a/dulwich/tests/test_fastexport.py +++ b/dulwich/tests/test_fastexport.py @@ -1,305 +1,317 @@ # test_fastexport.py -- Fast export/import functionality # Copyright (C) 2010 Jelmer Vernooij <jelmer@samba.org> # # Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU # General Public License as public by the Free Software Foundation; version 2.0 # or (at your option) any later version. You can redistribute it and/or # modify it under the terms of either of these two licenses. # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # You should have received a copy of the licenses; if not, see # <http://www.gnu.org/licenses/> for a copy of the GNU General Public License # and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache # License, Version 2.0. # from io import BytesIO import stat from dulwich.object_store import ( MemoryObjectStore, ) from dulwich.objects import ( Blob, Commit, Tree, ZERO_SHA, ) from dulwich.repo import ( MemoryRepo, ) from dulwich.tests import ( SkipTest, TestCase, ) from dulwich.tests.utils import ( build_commit_graph, ) class GitFastExporterTests(TestCase): """Tests for the GitFastExporter tests.""" def setUp(self): super(GitFastExporterTests, self).setUp() self.store = MemoryObjectStore() self.stream = BytesIO() try: from dulwich.fastexport import GitFastExporter except ImportError: raise SkipTest("python-fastimport not available") self.fastexporter = GitFastExporter(self.stream, self.store) def test_emit_blob(self): b = Blob() b.data = b"fooBAR" self.fastexporter.emit_blob(b) self.assertEqual(b"blob\nmark :1\ndata 6\nfooBAR\n", self.stream.getvalue()) def test_emit_commit(self): b = Blob() b.data = b"FOO" t = Tree() t.add(b"foo", stat.S_IFREG | 0o644, b.id) c = Commit() c.committer = c.author = b"Jelmer <jelmer@samba.org>" c.author_time = c.commit_time = 1271345553 c.author_timezone = c.commit_timezone = 0 c.message = b"msg" c.tree = t.id self.store.add_objects([(b, None), (t, None), (c, None)]) self.fastexporter.emit_commit(c, b"refs/heads/master") self.assertEqual( b"""blob mark :1 data 3 FOO commit refs/heads/master mark :2 author Jelmer <jelmer@samba.org> 1271345553 +0000 committer Jelmer <jelmer@samba.org> 1271345553 +0000 data 3 msg M 644 :1 foo """, self.stream.getvalue(), ) class GitImportProcessorTests(TestCase): """Tests for the GitImportProcessor tests.""" def setUp(self): super(GitImportProcessorTests, self).setUp() self.repo = MemoryRepo() try: from dulwich.fastexport import GitImportProcessor except ImportError: raise SkipTest("python-fastimport not available") self.processor = GitImportProcessor(self.repo) def test_reset_handler(self): from fastimport import commands [c1] = build_commit_graph(self.repo.object_store, [[1]]) cmd = commands.ResetCommand(b"refs/heads/foo", c1.id) self.processor.reset_handler(cmd) self.assertEqual(c1.id, self.repo.get_refs()[b"refs/heads/foo"]) self.assertEqual(c1.id, self.processor.last_commit) def test_reset_handler_marker(self): from fastimport import commands [c1, c2] = build_commit_graph(self.repo.object_store, [[1], [2]]) self.processor.markers[b"10"] = c1.id cmd = commands.ResetCommand(b"refs/heads/foo", b":10") self.processor.reset_handler(cmd) self.assertEqual(c1.id, self.repo.get_refs()[b"refs/heads/foo"]) def test_reset_handler_default(self): from fastimport import commands [c1, c2] = build_commit_graph(self.repo.object_store, [[1], [2]]) cmd = commands.ResetCommand(b"refs/heads/foo", None) self.processor.reset_handler(cmd) self.assertEqual(ZERO_SHA, self.repo.get_refs()[b"refs/heads/foo"]) def test_commit_handler(self): from fastimport import commands cmd = commands.CommitCommand( b"refs/heads/foo", b"mrkr", (b"Jelmer", b"jelmer@samba.org", 432432432.0, 3600), (b"Jelmer", b"jelmer@samba.org", 432432432.0, 3600), b"FOO", None, [], [], ) self.processor.commit_handler(cmd) commit = self.repo[self.processor.last_commit] self.assertEqual(b"Jelmer <jelmer@samba.org>", commit.author) self.assertEqual(b"Jelmer <jelmer@samba.org>", commit.committer) self.assertEqual(b"FOO", commit.message) self.assertEqual([], commit.parents) self.assertEqual(432432432.0, commit.commit_time) self.assertEqual(432432432.0, commit.author_time) self.assertEqual(3600, commit.commit_timezone) self.assertEqual(3600, commit.author_timezone) self.assertEqual(commit, self.repo[b"refs/heads/foo"]) def test_commit_handler_markers(self): from fastimport import commands [c1, c2, c3] = build_commit_graph(self.repo.object_store, [[1], [2], [3]]) self.processor.markers[b"10"] = c1.id self.processor.markers[b"42"] = c2.id self.processor.markers[b"98"] = c3.id cmd = commands.CommitCommand( b"refs/heads/foo", b"mrkr", (b"Jelmer", b"jelmer@samba.org", 432432432.0, 3600), (b"Jelmer", b"jelmer@samba.org", 432432432.0, 3600), b"FOO", b":10", [b":42", b":98"], [], ) self.processor.commit_handler(cmd) commit = self.repo[self.processor.last_commit] self.assertEqual(c1.id, commit.parents[0]) self.assertEqual(c2.id, commit.parents[1]) self.assertEqual(c3.id, commit.parents[2]) def test_import_stream(self): markers = self.processor.import_stream( BytesIO( b"""blob mark :1 data 11 text for a commit refs/heads/master mark :2 committer Joe Foo <joe@foo.com> 1288287382 +0000 data 20 <The commit message> M 100644 :1 a """ ) ) self.assertEqual(2, len(markers)) self.assertTrue(isinstance(self.repo[markers[b"1"]], Blob)) self.assertTrue(isinstance(self.repo[markers[b"2"]], Commit))
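The exporter and importer tested above are designed to round-trip. A hedged sketch of that round trip (illustrative only, not part of the patch; build_commit_graph supplies synthetic identities, and the optional python-fastimport dependency is required, just as in the tests):

    from io import BytesIO

    from dulwich.fastexport import GitFastExporter, GitImportProcessor
    from dulwich.repo import MemoryRepo
    from dulwich.tests.utils import build_commit_graph

    source = MemoryRepo()
    [c1] = build_commit_graph(source.object_store, [[1]])

    # Export one commit to a fast-export stream...
    stream = BytesIO()
    GitFastExporter(stream, source.object_store).emit_commit(c1, b"refs/heads/master")

    # ...and replay that stream into a second, empty repository.
    target = MemoryRepo()
    markers = GitImportProcessor(target).import_stream(BytesIO(stream.getvalue()))
    print(sorted(markers.items()))  # mark -> SHA mapping built during the import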
0o100644, b":23", None)], ) self.processor.commit_handler(cmd) commit = self.repo[self.processor.last_commit] return commit def make_file_commit(self, file_cmds): """Create a trivial commit with the specified file commands. Args: file_cmds: File commands to run. Returns: The created commit object """ from fastimport import commands cmd = commands.CommitCommand( b"refs/heads/foo", b"mrkr", (b"Jelmer", b"jelmer@samba.org", 432432432.0, 3600), (b"Jelmer", b"jelmer@samba.org", 432432432.0, 3600), b"FOO", None, [], file_cmds, ) self.processor.commit_handler(cmd) return self.repo[self.processor.last_commit] def test_file_copy(self): from fastimport import commands self.simple_commit() commit = self.make_file_commit([commands.FileCopyCommand(b"path", b"new_path")]) self.assertEqual( [ - (b"new_path", 0o100644, b"6320cd248dd8aeaab759d5871f8781b5c0505172"), - (b"path", 0o100644, b"6320cd248dd8aeaab759d5871f8781b5c0505172"), + ( + b"new_path", + 0o100644, + b"6320cd248dd8aeaab759d5871f8781b5c0505172", + ), + ( + b"path", + 0o100644, + b"6320cd248dd8aeaab759d5871f8781b5c0505172", + ), ], self.repo[commit.tree].items(), ) def test_file_move(self): from fastimport import commands self.simple_commit() commit = self.make_file_commit( [commands.FileRenameCommand(b"path", b"new_path")] ) self.assertEqual( [ - (b"new_path", 0o100644, b"6320cd248dd8aeaab759d5871f8781b5c0505172"), + ( + b"new_path", + 0o100644, + b"6320cd248dd8aeaab759d5871f8781b5c0505172", + ), ], self.repo[commit.tree].items(), ) def test_file_delete(self): from fastimport import commands self.simple_commit() commit = self.make_file_commit([commands.FileDeleteCommand(b"path")]) self.assertEqual([], self.repo[commit.tree].items()) def test_file_deleteall(self): from fastimport import commands self.simple_commit() commit = self.make_file_commit([commands.FileDeleteAllCommand()]) self.assertEqual([], self.repo[commit.tree].items()) diff --git a/dulwich/tests/test_grafts.py b/dulwich/tests/test_grafts.py index 3ec2c714..92fff290 100644 --- a/dulwich/tests/test_grafts.py +++ b/dulwich/tests/test_grafts.py @@ -1,208 +1,210 @@ # test_grafts.py -- Tests for graftpoints # # Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU # General Public License as public by the Free Software Foundation; version 2.0 # or (at your option) any later version. You can redistribute it and/or # modify it under the terms of either of these two licenses. # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # You should have received a copy of the licenses; if not, see # for a copy of the GNU General Public License # and for a copy of the Apache # License, Version 2.0. 
# """Tests for graftpoints.""" import os import tempfile import shutil from dulwich.errors import ObjectFormatException from dulwich.tests import TestCase from dulwich.objects import ( Tree, ) from dulwich.repo import ( parse_graftpoints, serialize_graftpoints, MemoryRepo, Repo, ) def makesha(digit): return (str(digit).encode("ascii") * 40)[:40] class GraftParserTests(TestCase): def assertParse(self, expected, graftpoints): self.assertEqual(expected, parse_graftpoints(iter(graftpoints))) def test_no_grafts(self): self.assertParse({}, []) def test_no_parents(self): self.assertParse({makesha(0): []}, [makesha(0)]) def test_parents(self): self.assertParse( {makesha(0): [makesha(1), makesha(2)]}, [b" ".join([makesha(0), makesha(1), makesha(2)])], ) def test_multiple_hybrid(self): self.assertParse( { makesha(0): [], makesha(1): [makesha(2)], makesha(3): [makesha(4), makesha(5)], }, [ makesha(0), b" ".join([makesha(1), makesha(2)]), b" ".join([makesha(3), makesha(4), makesha(5)]), ], ) class GraftSerializerTests(TestCase): def assertSerialize(self, expected, graftpoints): self.assertEqual(sorted(expected), sorted(serialize_graftpoints(graftpoints))) def test_no_grafts(self): self.assertSerialize(b"", {}) def test_no_parents(self): self.assertSerialize(makesha(0), {makesha(0): []}) def test_parents(self): self.assertSerialize( b" ".join([makesha(0), makesha(1), makesha(2)]), {makesha(0): [makesha(1), makesha(2)]}, ) def test_multiple_hybrid(self): self.assertSerialize( b"\n".join( [ makesha(0), b" ".join([makesha(1), makesha(2)]), b" ".join([makesha(3), makesha(4), makesha(5)]), ] ), { makesha(0): [], makesha(1): [makesha(2)], makesha(3): [makesha(4), makesha(5)], }, ) class GraftsInRepositoryBase(object): def tearDown(self): super(GraftsInRepositoryBase, self).tearDown() def get_repo_with_grafts(self, grafts): r = self._repo r._add_graftpoints(grafts) return r def test_no_grafts(self): r = self.get_repo_with_grafts({}) shas = [e.commit.id for e in r.get_walker()] self.assertEqual(shas, self._shas[::-1]) def test_no_parents_graft(self): r = self.get_repo_with_grafts({self._repo.head(): []}) self.assertEqual([e.commit.id for e in r.get_walker()], [r.head()]) def test_existing_parent_graft(self): r = self.get_repo_with_grafts({self._shas[-1]: [self._shas[0]]}) self.assertEqual( - [e.commit.id for e in r.get_walker()], [self._shas[-1], self._shas[0]] + [e.commit.id for e in r.get_walker()], + [self._shas[-1], self._shas[0]], ) def test_remove_graft(self): r = self.get_repo_with_grafts({self._repo.head(): []}) r._remove_graftpoints([self._repo.head()]) self.assertEqual([e.commit.id for e in r.get_walker()], self._shas[::-1]) def test_object_store_fail_invalid_parents(self): r = self._repo self.assertRaises( ObjectFormatException, r._add_graftpoints, {self._shas[-1]: ["1"]} ) class GraftsInRepoTests(GraftsInRepositoryBase, TestCase): def setUp(self): super(GraftsInRepoTests, self).setUp() self._repo_dir = os.path.join(tempfile.mkdtemp()) r = self._repo = Repo.init(self._repo_dir) self.addCleanup(shutil.rmtree, self._repo_dir) self._shas = [] commit_kwargs = { "committer": b"Test Committer ", "author": b"Test Author ", "commit_timestamp": 12395, "commit_timezone": 0, "author_timestamp": 12395, "author_timezone": 0, } self._shas.append(r.do_commit(b"empty commit", **commit_kwargs)) self._shas.append(r.do_commit(b"empty commit", **commit_kwargs)) self._shas.append(r.do_commit(b"empty commit", **commit_kwargs)) def test_init_with_empty_info_grafts(self): r = self._repo 
r._put_named_file(os.path.join("info", "grafts"), b"") r = Repo(self._repo_dir) self.assertEqual({}, r._graftpoints) def test_init_with_info_grafts(self): r = self._repo r._put_named_file( - os.path.join("info", "grafts"), self._shas[-1] + b" " + self._shas[0] + os.path.join("info", "grafts"), + self._shas[-1] + b" " + self._shas[0], ) r = Repo(self._repo_dir) self.assertEqual({self._shas[-1]: [self._shas[0]]}, r._graftpoints) class GraftsInMemoryRepoTests(GraftsInRepositoryBase, TestCase): def setUp(self): super(GraftsInMemoryRepoTests, self).setUp() r = self._repo = MemoryRepo() self._shas = [] tree = Tree() commit_kwargs = { "committer": b"Test Committer <test@nodomain.com>", "author": b"Test Author <test@nodomain.com>", "commit_timestamp": 12395, "commit_timezone": 0, "author_timestamp": 12395, "author_timezone": 0, "tree": tree.id, } self._shas.append(r.do_commit(b"empty commit", **commit_kwargs)) self._shas.append(r.do_commit(b"empty commit", **commit_kwargs)) self._shas.append(r.do_commit(b"empty commit", **commit_kwargs)) diff --git a/dulwich/tests/test_ignore.py b/dulwich/tests/test_ignore.py index afcb5cde..89fe2c38 100644 --- a/dulwich/tests/test_ignore.py +++ b/dulwich/tests/test_ignore.py @@ -1,251 +1,253 @@ # test_ignore.py -- Tests for ignore files. # Copyright (C) 2017 Jelmer Vernooij <jelmer@samba.org> # # Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU # General Public License as public by the Free Software Foundation; version 2.0 # or (at your option) any later version. You can redistribute it and/or # modify it under the terms of either of these two licenses. # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # You should have received a copy of the licenses; if not, see # <http://www.gnu.org/licenses/> for a copy of the GNU General Public License # and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache # License, Version 2.0.
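As a primer for the matching tests that follow, here is a minimal sketch of the IgnoreFilter API (illustrative patterns, not part of the patch). Note the three-valued result: True for an ignore match, False for a negated match, None when no pattern applies at all:

    from dulwich.ignore import IgnoreFilter, translate

    f = IgnoreFilter([b"*.o", b"!keep.o", b"build/"])

    assert f.is_ignored(b"foo.o") is True    # matched by *.o
    assert f.is_ignored(b"keep.o") is False  # matched, but negated by !keep.o
    assert f.is_ignored(b"docs") is None     # no pattern matched at all

    # translate() exposes the regex a single gitignore pattern compiles to.
    print(translate(b"*.o"))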
# """Tests for ignore files.""" from io import BytesIO import os import re import shutil import tempfile from dulwich.tests import TestCase from dulwich.ignore import ( IgnoreFilter, IgnoreFilterManager, IgnoreFilterStack, Pattern, match_pattern, read_ignore_patterns, translate, ) from dulwich.repo import Repo POSITIVE_MATCH_TESTS = [ (b"foo.c", b"*.c"), (b".c", b"*.c"), (b"foo/foo.c", b"*.c"), (b"foo/foo.c", b"foo.c"), (b"foo.c", b"/*.c"), (b"foo.c", b"/foo.c"), (b"foo.c", b"foo.c"), (b"foo.c", b"foo.[ch]"), (b"foo/bar/bla.c", b"foo/**"), (b"foo/bar/bla/blie.c", b"foo/**/blie.c"), (b"foo/bar/bla.c", b"**/bla.c"), (b"bla.c", b"**/bla.c"), (b"foo/bar", b"foo/**/bar"), (b"foo/bla/bar", b"foo/**/bar"), (b"foo/bar/", b"bar/"), (b"foo/bar/", b"bar"), (b"foo/bar/something", b"foo/bar/*"), ] NEGATIVE_MATCH_TESTS = [ (b"foo.c", b"foo.[dh]"), (b"foo/foo.c", b"/foo.c"), (b"foo/foo.c", b"/*.c"), (b"foo/bar/", b"/bar/"), (b"foo/bar/", b"foo/bar/*"), (b"foo/bar", b"foo?bar"), ] TRANSLATE_TESTS = [ (b"*.c", b"(?ms)(.*/)?[^/]*\\.c/?\\Z"), (b"foo.c", b"(?ms)(.*/)?foo\\.c/?\\Z"), (b"/*.c", b"(?ms)[^/]*\\.c/?\\Z"), (b"/foo.c", b"(?ms)foo\\.c/?\\Z"), (b"foo.c", b"(?ms)(.*/)?foo\\.c/?\\Z"), (b"foo.[ch]", b"(?ms)(.*/)?foo\\.[ch]/?\\Z"), (b"bar/", b"(?ms)(.*/)?bar\\/\\Z"), (b"foo/**", b"(?ms)foo(/.*)?/?\\Z"), (b"foo/**/blie.c", b"(?ms)foo(/.*)?\\/blie\\.c/?\\Z"), (b"**/bla.c", b"(?ms)(.*/)?bla\\.c/?\\Z"), (b"foo/**/bar", b"(?ms)foo(/.*)?\\/bar/?\\Z"), (b"foo/bar/*", b"(?ms)foo\\/bar\\/[^/]+/?\\Z"), ] class TranslateTests(TestCase): def test_translate(self): for (pattern, regex) in TRANSLATE_TESTS: if re.escape(b"/") == b"/": # Slash is no longer escaped in Python3.7, so undo the escaping # in the expected return value.. regex = regex.replace(b"\\/", b"/") self.assertEqual( regex, translate(pattern), "orig pattern: %r, regex: %r, expected: %r" % (pattern, translate(pattern), regex), ) class ReadIgnorePatterns(TestCase): def test_read_file(self): f = BytesIO( b""" # a comment # and an empty line: \\#not a comment !negative with trailing whitespace with escaped trailing whitespace\\ """ ) # noqa: W291 self.assertEqual( list(read_ignore_patterns(f)), [ b"\\#not a comment", b"!negative", b"with trailing whitespace", b"with escaped trailing whitespace ", ], ) class MatchPatternTests(TestCase): def test_matches(self): for (path, pattern) in POSITIVE_MATCH_TESTS: self.assertTrue( - match_pattern(path, pattern), "path: %r, pattern: %r" % (path, pattern) + match_pattern(path, pattern), + "path: %r, pattern: %r" % (path, pattern), ) def test_no_matches(self): for (path, pattern) in NEGATIVE_MATCH_TESTS: self.assertFalse( - match_pattern(path, pattern), "path: %r, pattern: %r" % (path, pattern) + match_pattern(path, pattern), + "path: %r, pattern: %r" % (path, pattern), ) class IgnoreFilterTests(TestCase): def test_included(self): filter = IgnoreFilter([b"a.c", b"b.c"]) self.assertTrue(filter.is_ignored(b"a.c")) self.assertIs(None, filter.is_ignored(b"c.c")) self.assertEqual([Pattern(b"a.c")], list(filter.find_matching(b"a.c"))) self.assertEqual([], list(filter.find_matching(b"c.c"))) def test_included_ignorecase(self): filter = IgnoreFilter([b"a.c", b"b.c"], ignorecase=False) self.assertTrue(filter.is_ignored(b"a.c")) self.assertFalse(filter.is_ignored(b"A.c")) filter = IgnoreFilter([b"a.c", b"b.c"], ignorecase=True) self.assertTrue(filter.is_ignored(b"a.c")) self.assertTrue(filter.is_ignored(b"A.c")) self.assertTrue(filter.is_ignored(b"A.C")) def test_excluded(self): filter = IgnoreFilter([b"a.c", b"b.c", b"!c.c"]) 
self.assertFalse(filter.is_ignored(b"c.c")) self.assertIs(None, filter.is_ignored(b"d.c")) self.assertEqual([Pattern(b"!c.c")], list(filter.find_matching(b"c.c"))) self.assertEqual([], list(filter.find_matching(b"d.c"))) def test_include_exclude_include(self): filter = IgnoreFilter([b"a.c", b"!a.c", b"a.c"]) self.assertTrue(filter.is_ignored(b"a.c")) self.assertEqual( [Pattern(b"a.c"), Pattern(b"!a.c"), Pattern(b"a.c")], list(filter.find_matching(b"a.c")), ) def test_manpage(self): # A specific example from the gitignore manpage filter = IgnoreFilter([b"/*", b"!/foo", b"/foo/*", b"!/foo/bar"]) self.assertTrue(filter.is_ignored(b"a.c")) self.assertTrue(filter.is_ignored(b"foo/blie")) self.assertFalse(filter.is_ignored(b"foo")) self.assertFalse(filter.is_ignored(b"foo/bar")) self.assertFalse(filter.is_ignored(b"foo/bar/")) self.assertFalse(filter.is_ignored(b"foo/bar/bloe")) class IgnoreFilterStackTests(TestCase): def test_stack_first(self): filter1 = IgnoreFilter([b"[a].c", b"[b].c", b"![d].c"]) filter2 = IgnoreFilter([b"[a].c", b"![b],c", b"[c].c", b"[d].c"]) stack = IgnoreFilterStack([filter1, filter2]) self.assertIs(True, stack.is_ignored(b"a.c")) self.assertIs(True, stack.is_ignored(b"b.c")) self.assertIs(True, stack.is_ignored(b"c.c")) self.assertIs(False, stack.is_ignored(b"d.c")) self.assertIs(None, stack.is_ignored(b"e.c")) class IgnoreFilterManagerTests(TestCase): def test_load_ignore(self): tmp_dir = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, tmp_dir) repo = Repo.init(tmp_dir) with open(os.path.join(repo.path, ".gitignore"), "wb") as f: f.write(b"/foo/bar\n") f.write(b"/dir2\n") f.write(b"/dir3/\n") os.mkdir(os.path.join(repo.path, "dir")) with open(os.path.join(repo.path, "dir", ".gitignore"), "wb") as f: f.write(b"/blie\n") with open(os.path.join(repo.path, "dir", "blie"), "wb") as f: f.write(b"IGNORED") p = os.path.join(repo.controldir(), "info", "exclude") with open(p, "wb") as f: f.write(b"/excluded\n") m = IgnoreFilterManager.from_repo(repo) self.assertTrue(m.is_ignored("dir/blie")) self.assertIs(None, m.is_ignored(os.path.join("dir", "bloe"))) self.assertIs(None, m.is_ignored("dir")) self.assertTrue(m.is_ignored(os.path.join("foo", "bar"))) self.assertTrue(m.is_ignored(os.path.join("excluded"))) self.assertTrue(m.is_ignored(os.path.join("dir2", "fileinignoreddir"))) self.assertFalse(m.is_ignored("dir3")) self.assertTrue(m.is_ignored("dir3/")) self.assertTrue(m.is_ignored("dir3/bla")) def test_load_ignore_ignorecase(self): tmp_dir = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, tmp_dir) repo = Repo.init(tmp_dir) config = repo.get_config() config.set(b"core", b"ignorecase", True) config.write_to_path() with open(os.path.join(repo.path, ".gitignore"), "wb") as f: f.write(b"/foo/bar\n") f.write(b"/dir\n") m = IgnoreFilterManager.from_repo(repo) self.assertTrue(m.is_ignored(os.path.join("dir", "blie"))) self.assertTrue(m.is_ignored(os.path.join("DIR", "blie"))) def test_ignored_contents(self): tmp_dir = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, tmp_dir) repo = Repo.init(tmp_dir) with open(os.path.join(repo.path, ".gitignore"), "wb") as f: f.write(b"a/*\n") f.write(b"!a/*.txt\n") m = IgnoreFilterManager.from_repo(repo) os.mkdir(os.path.join(repo.path, "a")) self.assertIs(None, m.is_ignored("a")) self.assertIs(None, m.is_ignored("a/")) self.assertFalse(m.is_ignored("a/b.txt")) self.assertTrue(m.is_ignored("a/c.dat")) diff --git a/dulwich/tests/test_index.py b/dulwich/tests/test_index.py index fe74ba9a..bef5e1d9 100644 --- a/dulwich/tests/test_index.py +++ 
b/dulwich/tests/test_index.py @@ -1,825 +1,834 @@ # -*- coding: utf-8 -*- # test_index.py -- Tests for the git index # encoding: utf-8 # Copyright (C) 2008-2009 Jelmer Vernooij # # Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU # General Public License as public by the Free Software Foundation; version 2.0 # or (at your option) any later version. You can redistribute it and/or # modify it under the terms of either of these two licenses. # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # You should have received a copy of the licenses; if not, see # for a copy of the GNU General Public License # and for a copy of the Apache # License, Version 2.0. # """Tests for the index.""" from io import BytesIO import os import shutil import stat import struct import sys import tempfile import warnings from dulwich.index import ( Index, build_index_from_tree, cleanup_mode, commit_tree, get_unstaged_changes, index_entry_from_stat, read_index, read_index_dict, validate_path_element_default, validate_path_element_ntfs, write_cache_time, write_index, write_index_dict, _tree_to_fs_path, _fs_to_tree_path, ) from dulwich.object_store import ( MemoryObjectStore, ) from dulwich.objects import ( Blob, Commit, Tree, S_IFGITLINK, ) from dulwich.repo import Repo from dulwich.tests import ( TestCase, skipIf, ) from dulwich.tests.utils import ( setup_warning_catcher, ) def can_symlink(): """Return whether running process can create symlinks.""" if sys.platform != "win32": # Platforms other than Windows should allow symlinks without issues. return True if not hasattr(os, "symlink"): # Older Python versions do not have `os.symlink` on Windows. 
return False test_source = tempfile.mkdtemp() test_target = test_source + "can_symlink" try: os.symlink(test_source, test_target) except (NotImplementedError, OSError): return False return True class IndexTestCase(TestCase): datadir = os.path.join(os.path.dirname(__file__), "data/indexes") def get_simple_index(self, name): return Index(os.path.join(self.datadir, name)) class SimpleIndexTestCase(IndexTestCase): def test_len(self): self.assertEqual(1, len(self.get_simple_index("index"))) def test_iter(self): self.assertEqual([b"bla"], list(self.get_simple_index("index"))) def test_iterobjects(self): self.assertEqual( [(b"bla", b"e69de29bb2d1d6434b8b29ae775ad8c2e48c5391", 33188)], list(self.get_simple_index("index").iterobjects()), ) def test_iterblobs(self): warnings.simplefilter("always", UserWarning) self.addCleanup(warnings.resetwarnings) warnings_list, restore_warnings = setup_warning_catcher() self.addCleanup(restore_warnings) self.assertEqual( [(b"bla", b"e69de29bb2d1d6434b8b29ae775ad8c2e48c5391", 33188)], list(self.get_simple_index("index").iterblobs()), ) expected_warning = PendingDeprecationWarning("Use iterobjects() instead.") for w in warnings_list: if type(w) == type(expected_warning) and w.args == expected_warning.args: break else: raise AssertionError( "Expected warning %r not in %r" % (expected_warning, warnings_list) ) def test_getitem(self): self.assertEqual( ( (1230680220, 0), (1230680220, 0), 2050, 3761020, 33188, 1000, 1000, 0, b"e69de29bb2d1d6434b8b29ae775ad8c2e48c5391", 0, ), self.get_simple_index("index")[b"bla"], ) def test_empty(self): i = self.get_simple_index("notanindex") self.assertEqual(0, len(i)) self.assertFalse(os.path.exists(i._filename)) def test_against_empty_tree(self): i = self.get_simple_index("index") changes = list(i.changes_from_tree(MemoryObjectStore(), None)) self.assertEqual(1, len(changes)) (oldname, newname), (oldmode, newmode), (oldsha, newsha) = changes[0] self.assertEqual(b"bla", newname) self.assertEqual(b"e69de29bb2d1d6434b8b29ae775ad8c2e48c5391", newsha) class SimpleIndexWriterTestCase(IndexTestCase): def setUp(self): IndexTestCase.setUp(self) self.tempdir = tempfile.mkdtemp() def tearDown(self): IndexTestCase.tearDown(self) shutil.rmtree(self.tempdir) def test_simple_write(self): entries = [ ( b"barbla", (1230680220, 0), (1230680220, 0), 2050, 3761020, 33188, 1000, 1000, 0, b"e69de29bb2d1d6434b8b29ae775ad8c2e48c5391", 0, ) ] filename = os.path.join(self.tempdir, "test-simple-write-index") with open(filename, "wb+") as x: write_index(x, entries) with open(filename, "rb") as x: self.assertEqual(entries, list(read_index(x))) class ReadIndexDictTests(IndexTestCase): def setUp(self): IndexTestCase.setUp(self) self.tempdir = tempfile.mkdtemp() def tearDown(self): IndexTestCase.tearDown(self) shutil.rmtree(self.tempdir) def test_simple_write(self): entries = { b"barbla": ( (1230680220, 0), (1230680220, 0), 2050, 3761020, 33188, 1000, 1000, 0, b"e69de29bb2d1d6434b8b29ae775ad8c2e48c5391", 0, ) } filename = os.path.join(self.tempdir, "test-simple-write-index") with open(filename, "wb+") as x: write_index_dict(x, entries) with open(filename, "rb") as x: self.assertEqual(entries, read_index_dict(x)) class CommitTreeTests(TestCase): def setUp(self): super(CommitTreeTests, self).setUp() self.store = MemoryObjectStore() def test_single_blob(self): blob = Blob() blob.data = b"foo" self.store.add_object(blob) blobs = [(b"bla", blob.id, stat.S_IFREG)] rootid = commit_tree(self.store, blobs) self.assertEqual(rootid, 
b"1a1e80437220f9312e855c37ac4398b68e5c1d50") self.assertEqual((stat.S_IFREG, blob.id), self.store[rootid][b"bla"]) self.assertEqual(set([rootid, blob.id]), set(self.store._data.keys())) def test_nested(self): blob = Blob() blob.data = b"foo" self.store.add_object(blob) blobs = [(b"bla/bar", blob.id, stat.S_IFREG)] rootid = commit_tree(self.store, blobs) self.assertEqual(rootid, b"d92b959b216ad0d044671981196781b3258fa537") dirid = self.store[rootid][b"bla"][1] self.assertEqual(dirid, b"c1a1deb9788150829579a8b4efa6311e7b638650") self.assertEqual((stat.S_IFDIR, dirid), self.store[rootid][b"bla"]) self.assertEqual((stat.S_IFREG, blob.id), self.store[dirid][b"bar"]) self.assertEqual(set([rootid, dirid, blob.id]), set(self.store._data.keys())) class CleanupModeTests(TestCase): def assertModeEqual(self, expected, got): self.assertEqual(expected, got, "%o != %o" % (expected, got)) def test_file(self): self.assertModeEqual(0o100644, cleanup_mode(0o100000)) def test_executable(self): self.assertModeEqual(0o100755, cleanup_mode(0o100711)) self.assertModeEqual(0o100755, cleanup_mode(0o100700)) def test_symlink(self): self.assertModeEqual(0o120000, cleanup_mode(0o120711)) def test_dir(self): self.assertModeEqual(0o040000, cleanup_mode(0o40531)) def test_submodule(self): self.assertModeEqual(0o160000, cleanup_mode(0o160744)) class WriteCacheTimeTests(TestCase): def test_write_string(self): f = BytesIO() self.assertRaises(TypeError, write_cache_time, f, "foo") def test_write_int(self): f = BytesIO() write_cache_time(f, 434343) self.assertEqual(struct.pack(">LL", 434343, 0), f.getvalue()) def test_write_tuple(self): f = BytesIO() write_cache_time(f, (434343, 21)) self.assertEqual(struct.pack(">LL", 434343, 21), f.getvalue()) def test_write_float(self): f = BytesIO() write_cache_time(f, 434343.000000021) self.assertEqual(struct.pack(">LL", 434343, 21), f.getvalue()) class IndexEntryFromStatTests(TestCase): def test_simple(self): st = os.stat_result( ( 16877, 131078, 64769, 154, 1000, 1000, 12288, 1323629595, 1324180496, 1324180496, ) ) entry = index_entry_from_stat(st, "22" * 20, 0) self.assertEqual( entry, ( 1324180496, 1324180496, 64769, 131078, 16384, 1000, 1000, 12288, "2222222222222222222222222222222222222222", 0, ), ) def test_override_mode(self): st = os.stat_result( ( stat.S_IFREG + 0o644, 131078, 64769, 154, 1000, 1000, 12288, 1323629595, 1324180496, 1324180496, ) ) entry = index_entry_from_stat(st, "22" * 20, 0, mode=stat.S_IFREG + 0o755) self.assertEqual( entry, ( 1324180496, 1324180496, 64769, 131078, 33261, 1000, 1000, 12288, "2222222222222222222222222222222222222222", 0, ), ) class BuildIndexTests(TestCase): def assertReasonableIndexEntry(self, index_entry, mode, filesize, sha): self.assertEqual(index_entry[4], mode) # mode self.assertEqual(index_entry[7], filesize) # filesize self.assertEqual(index_entry[8], sha) # sha def assertFileContents(self, path, contents, symlink=False): if symlink: self.assertEqual(os.readlink(path), contents) else: with open(path, "rb") as f: self.assertEqual(f.read(), contents) def test_empty(self): repo_dir = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, repo_dir) with Repo.init(repo_dir) as repo: tree = Tree() repo.object_store.add_object(tree) build_index_from_tree( repo.path, repo.index_path(), repo.object_store, tree.id ) # Verify index entries index = repo.open_index() self.assertEqual(len(index), 0) # Verify no files self.assertEqual([".git"], os.listdir(repo.path)) def test_git_dir(self): repo_dir = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, 
repo_dir) with Repo.init(repo_dir) as repo: # Populate repo filea = Blob.from_string(b"file a") filee = Blob.from_string(b"d") tree = Tree() tree[b".git/a"] = (stat.S_IFREG | 0o644, filea.id) tree[b"c/e"] = (stat.S_IFREG | 0o644, filee.id) repo.object_store.add_objects([(o, None) for o in [filea, filee, tree]]) build_index_from_tree( repo.path, repo.index_path(), repo.object_store, tree.id ) # Verify index entries index = repo.open_index() self.assertEqual(len(index), 1) # filea apath = os.path.join(repo.path, ".git", "a") self.assertFalse(os.path.exists(apath)) # filee epath = os.path.join(repo.path, "c", "e") self.assertTrue(os.path.exists(epath)) self.assertReasonableIndexEntry( index[b"c/e"], stat.S_IFREG | 0o644, 1, filee.id ) self.assertFileContents(epath, b"d") def test_nonempty(self): repo_dir = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, repo_dir) with Repo.init(repo_dir) as repo: # Populate repo filea = Blob.from_string(b"file a") fileb = Blob.from_string(b"file b") filed = Blob.from_string(b"file d") tree = Tree() tree[b"a"] = (stat.S_IFREG | 0o644, filea.id) tree[b"b"] = (stat.S_IFREG | 0o644, fileb.id) tree[b"c/d"] = (stat.S_IFREG | 0o644, filed.id) repo.object_store.add_objects( [(o, None) for o in [filea, fileb, filed, tree]] ) build_index_from_tree( repo.path, repo.index_path(), repo.object_store, tree.id ) # Verify index entries index = repo.open_index() self.assertEqual(len(index), 3) # filea apath = os.path.join(repo.path, "a") self.assertTrue(os.path.exists(apath)) self.assertReasonableIndexEntry( index[b"a"], stat.S_IFREG | 0o644, 6, filea.id ) self.assertFileContents(apath, b"file a") # fileb bpath = os.path.join(repo.path, "b") self.assertTrue(os.path.exists(bpath)) self.assertReasonableIndexEntry( index[b"b"], stat.S_IFREG | 0o644, 6, fileb.id ) self.assertFileContents(bpath, b"file b") # filed dpath = os.path.join(repo.path, "c", "d") self.assertTrue(os.path.exists(dpath)) self.assertReasonableIndexEntry( index[b"c/d"], stat.S_IFREG | 0o644, 6, filed.id ) self.assertFileContents(dpath, b"file d") # Verify no extra files self.assertEqual([".git", "a", "b", "c"], sorted(os.listdir(repo.path))) self.assertEqual(["d"], sorted(os.listdir(os.path.join(repo.path, "c")))) @skipIf(not getattr(os, "sync", None), "Requires sync support") def test_norewrite(self): repo_dir = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, repo_dir) with Repo.init(repo_dir) as repo: # Populate repo filea = Blob.from_string(b"file a") filea_path = os.path.join(repo_dir, "a") tree = Tree() tree[b"a"] = (stat.S_IFREG | 0o644, filea.id) repo.object_store.add_objects([(o, None) for o in [filea, tree]]) # First Write build_index_from_tree( repo.path, repo.index_path(), repo.object_store, tree.id ) # Use sync as metadata can be cached on some FS os.sync() mtime = os.stat(filea_path).st_mtime # Test Rewrite build_index_from_tree( repo.path, repo.index_path(), repo.object_store, tree.id ) os.sync() self.assertEqual(mtime, os.stat(filea_path).st_mtime) # Modify content with open(filea_path, "wb") as fh: fh.write(b"test a") os.sync() mtime = os.stat(filea_path).st_mtime # Test rewrite build_index_from_tree( repo.path, repo.index_path(), repo.object_store, tree.id ) os.sync() with open(filea_path, "rb") as fh: self.assertEqual(b"file a", fh.read()) @skipIf(not can_symlink(), "Requires symlink support") def test_symlink(self): repo_dir = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, repo_dir) with Repo.init(repo_dir) as repo: # Populate repo filed = Blob.from_string(b"file d") filee = 
Blob.from_string(b"d") tree = Tree() tree[b"c/d"] = (stat.S_IFREG | 0o644, filed.id) tree[b"c/e"] = (stat.S_IFLNK, filee.id) # symlink repo.object_store.add_objects([(o, None) for o in [filed, filee, tree]]) build_index_from_tree( repo.path, repo.index_path(), repo.object_store, tree.id ) # Verify index entries index = repo.open_index() # symlink to d epath = os.path.join(repo.path, "c", "e") self.assertTrue(os.path.exists(epath)) self.assertReasonableIndexEntry( index[b"c/e"], stat.S_IFLNK, 0 if sys.platform == "win32" else 1, filee.id, ) self.assertFileContents(epath, "d", symlink=True) def test_no_decode_encode(self): repo_dir = tempfile.mkdtemp() repo_dir_bytes = os.fsencode(repo_dir) self.addCleanup(shutil.rmtree, repo_dir) with Repo.init(repo_dir) as repo: # Populate repo file = Blob.from_string(b"foo") tree = Tree() latin1_name = u"À".encode("latin1") latin1_path = os.path.join(repo_dir_bytes, latin1_name) utf8_name = u"À".encode("utf8") utf8_path = os.path.join(repo_dir_bytes, utf8_name) tree[latin1_name] = (stat.S_IFREG | 0o644, file.id) tree[utf8_name] = (stat.S_IFREG | 0o644, file.id) repo.object_store.add_objects([(o, None) for o in [file, tree]]) try: build_index_from_tree( repo.path, repo.index_path(), repo.object_store, tree.id ) except OSError as e: if e.errno == 92 and sys.platform == "darwin": # Our filename isn't supported by the platform :( self.skipTest("can not write filename %r" % e.filename) else: raise except UnicodeDecodeError: # This happens e.g. with python3.6 on Windows. # It implicitly decodes using utf8, which doesn't work. self.skipTest("can not implicitly convert as utf8") # Verify index entries index = repo.open_index() self.assertIn(latin1_name, index) self.assertIn(utf8_name, index) self.assertTrue(os.path.exists(latin1_path)) self.assertTrue(os.path.exists(utf8_path)) def test_git_submodule(self): repo_dir = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, repo_dir) with Repo.init(repo_dir) as repo: filea = Blob.from_string(b"file alalala") subtree = Tree() subtree[b"a"] = (stat.S_IFREG | 0o644, filea.id) c = Commit() c.tree = subtree.id c.committer = c.author = b"Somebody " c.commit_time = c.author_time = 42342 c.commit_timezone = c.author_timezone = 0 c.parents = [] c.message = b"Subcommit" tree = Tree() tree[b"c"] = (S_IFGITLINK, c.id) repo.object_store.add_objects([(o, None) for o in [tree]]) build_index_from_tree( repo.path, repo.index_path(), repo.object_store, tree.id ) # Verify index entries index = repo.open_index() self.assertEqual(len(index), 1) # filea apath = os.path.join(repo.path, "c/a") self.assertFalse(os.path.exists(apath)) # dir c cpath = os.path.join(repo.path, "c") self.assertTrue(os.path.isdir(cpath)) self.assertEqual(index[b"c"][4], S_IFGITLINK) # mode self.assertEqual(index[b"c"][8], c.id) # sha def test_git_submodule_exists(self): repo_dir = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, repo_dir) with Repo.init(repo_dir) as repo: filea = Blob.from_string(b"file alalala") subtree = Tree() subtree[b"a"] = (stat.S_IFREG | 0o644, filea.id) c = Commit() c.tree = subtree.id c.committer = c.author = b"Somebody " c.commit_time = c.author_time = 42342 c.commit_timezone = c.author_timezone = 0 c.parents = [] c.message = b"Subcommit" tree = Tree() tree[b"c"] = (S_IFGITLINK, c.id) os.mkdir(os.path.join(repo_dir, "c")) repo.object_store.add_objects([(o, None) for o in [tree]]) build_index_from_tree( repo.path, repo.index_path(), repo.object_store, tree.id ) # Verify index entries index = repo.open_index() self.assertEqual(len(index), 1) 
class GetUnstagedChangesTests(TestCase): def test_get_unstaged_changes(self): """Unit test for get_unstaged_changes.""" repo_dir = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, repo_dir) with Repo.init(repo_dir) as repo: # Commit a dummy file then modify it foo1_fullpath = os.path.join(repo_dir, "foo1") with open(foo1_fullpath, "wb") as f: f.write(b"origstuff") foo2_fullpath = os.path.join(repo_dir, "foo2") with open(foo2_fullpath, "wb") as f: f.write(b"origstuff") repo.stage(["foo1", "foo2"]) repo.do_commit( - b"test status", author=b"author <email>", committer=b"committer <email>" + b"test status", + author=b"author <email>", + committer=b"committer <email>", ) with open(foo1_fullpath, "wb") as f: f.write(b"newstuff") # modify access and modify time of path os.utime(foo1_fullpath, (0, 0)) changes = get_unstaged_changes(repo.open_index(), repo_dir) self.assertEqual(list(changes), [b"foo1"]) def test_get_unstaged_deleted_changes(self): """Unit test for get_unstaged_changes.""" repo_dir = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, repo_dir) with Repo.init(repo_dir) as repo: # Commit a dummy file then remove it foo1_fullpath = os.path.join(repo_dir, "foo1") with open(foo1_fullpath, "wb") as f: f.write(b"origstuff") repo.stage(["foo1"]) repo.do_commit( - b"test status", author=b"author <email>", committer=b"committer <email>" + b"test status", + author=b"author <email>", + committer=b"committer <email>", ) os.unlink(foo1_fullpath) changes = get_unstaged_changes(repo.open_index(), repo_dir) self.assertEqual(list(changes), [b"foo1"]) def test_get_unstaged_changes_removed_replaced_by_directory(self): """Unit test for get_unstaged_changes.""" repo_dir = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, repo_dir) with Repo.init(repo_dir) as repo: # Commit a dummy file then modify it foo1_fullpath = os.path.join(repo_dir, "foo1") with open(foo1_fullpath, "wb") as f: f.write(b"origstuff") repo.stage(["foo1"]) repo.do_commit( - b"test status", author=b"author <email>", committer=b"committer <email>" + b"test status", + author=b"author <email>", + committer=b"committer <email>", ) os.remove(foo1_fullpath) os.mkdir(foo1_fullpath) changes = get_unstaged_changes(repo.open_index(), repo_dir) self.assertEqual(list(changes), [b"foo1"]) @skipIf(not can_symlink(), "Requires symlink support") def test_get_unstaged_changes_removed_replaced_by_link(self): """Unit test for get_unstaged_changes.""" repo_dir = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, repo_dir) with Repo.init(repo_dir) as repo: # Commit a dummy file then modify it foo1_fullpath = os.path.join(repo_dir, "foo1") with open(foo1_fullpath, "wb") as f: f.write(b"origstuff") repo.stage(["foo1"]) repo.do_commit( - b"test status", author=b"author <email>", committer=b"committer <email>" + b"test status", + author=b"author <email>", + committer=b"committer <email>", ) os.remove(foo1_fullpath) os.symlink(os.path.dirname(foo1_fullpath), foo1_fullpath) changes = get_unstaged_changes(repo.open_index(), repo_dir) self.assertEqual(list(changes), [b"foo1"]) class TestValidatePathElement(TestCase): def test_default(self): self.assertTrue(validate_path_element_default(b"bla")) self.assertTrue(validate_path_element_default(b".bla")) self.assertFalse(validate_path_element_default(b".git")) self.assertFalse(validate_path_element_default(b".giT"))
self.assertFalse(validate_path_element_default(b"..")) self.assertTrue(validate_path_element_default(b"git~1")) def test_ntfs(self): self.assertTrue(validate_path_element_ntfs(b"bla")) self.assertTrue(validate_path_element_ntfs(b".bla")) self.assertFalse(validate_path_element_ntfs(b".git")) self.assertFalse(validate_path_element_ntfs(b".giT")) self.assertFalse(validate_path_element_ntfs(b"..")) self.assertFalse(validate_path_element_ntfs(b"git~1")) class TestTreeFSPathConversion(TestCase): def test_tree_to_fs_path(self): tree_path = u"délwíçh/foo".encode("utf8") fs_path = _tree_to_fs_path(b"/prefix/path", tree_path) self.assertEqual( - fs_path, os.fsencode(os.path.join(u"/prefix/path", u"délwíçh", u"foo")) + fs_path, + os.fsencode(os.path.join(u"/prefix/path", u"délwíçh", u"foo")), ) def test_fs_to_tree_path_str(self): fs_path = os.path.join(os.path.join(u"délwíçh", u"foo")) tree_path = _fs_to_tree_path(fs_path) self.assertEqual(tree_path, u"délwíçh/foo".encode("utf-8")) def test_fs_to_tree_path_bytes(self): fs_path = os.path.join(os.fsencode(os.path.join(u"délwíçh", u"foo"))) tree_path = _fs_to_tree_path(fs_path) self.assertEqual(tree_path, u"délwíçh/foo".encode("utf-8")) diff --git a/dulwich/tests/test_lru_cache.py b/dulwich/tests/test_lru_cache.py index c8f21c5f..7696e3ee 100644 --- a/dulwich/tests/test_lru_cache.py +++ b/dulwich/tests/test_lru_cache.py @@ -1,454 +1,455 @@ # Copyright (C) 2006, 2008 Canonical Ltd # # Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU # General Public License as public by the Free Software Foundation; version 2.0 # or (at your option) any later version. You can redistribute it and/or # modify it under the terms of either of these two licenses. # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # You should have received a copy of the licenses; if not, see # for a copy of the GNU General Public License # and for a copy of the Apache # License, Version 2.0. # """Tests for the lru_cache module.""" from dulwich import ( lru_cache, ) from dulwich.tests import ( TestCase, ) class TestLRUCache(TestCase): """Test that LRU cache properly keeps track of entries.""" def test_cache_size(self): cache = lru_cache.LRUCache(max_cache=10) self.assertEqual(10, cache.cache_size()) cache = lru_cache.LRUCache(max_cache=256) self.assertEqual(256, cache.cache_size()) cache.resize(512) self.assertEqual(512, cache.cache_size()) def test_missing(self): cache = lru_cache.LRUCache(max_cache=10) self.assertFalse("foo" in cache) self.assertRaises(KeyError, cache.__getitem__, "foo") cache["foo"] = "bar" self.assertEqual("bar", cache["foo"]) self.assertTrue("foo" in cache) self.assertFalse("bar" in cache) def test_map_None(self): # Make sure that we can properly map None as a key. 
cache = lru_cache.LRUCache(max_cache=10) self.assertFalse(None in cache) cache[None] = 1 self.assertEqual(1, cache[None]) cache[None] = 2 self.assertEqual(2, cache[None]) # Test the various code paths of __getitem__, to make sure that we can # handle when None is the key for the LRU and the MRU cache[1] = 3 cache[None] = 1 cache[None] cache[1] cache[None] self.assertEqual([None, 1], [n.key for n in cache._walk_lru()]) def test_add__null_key(self): cache = lru_cache.LRUCache(max_cache=10) self.assertRaises(ValueError, cache.add, lru_cache._null_key, 1) def test_overflow(self): """Adding extra entries will pop out old ones.""" cache = lru_cache.LRUCache(max_cache=1, after_cleanup_count=1) cache["foo"] = "bar" # With a max cache of 1, adding 'baz' should pop out 'foo' cache["baz"] = "biz" self.assertFalse("foo" in cache) self.assertTrue("baz" in cache) self.assertEqual("biz", cache["baz"]) def test_by_usage(self): """Accessing entries bumps them up in priority.""" cache = lru_cache.LRUCache(max_cache=2) cache["baz"] = "biz" cache["foo"] = "bar" self.assertEqual("biz", cache["baz"]) # This must kick out 'foo' because it was the last accessed cache["nub"] = "in" self.assertFalse("foo" in cache) def test_cleanup(self): """Test that we can use a cleanup function.""" cleanup_called = [] def cleanup_func(key, val): cleanup_called.append((key, val)) cache = lru_cache.LRUCache(max_cache=2, after_cleanup_count=2) cache.add("baz", "1", cleanup=cleanup_func) cache.add("foo", "2", cleanup=cleanup_func) cache.add("biz", "3", cleanup=cleanup_func) self.assertEqual([("baz", "1")], cleanup_called) # 'foo' is now most recent, so final cleanup will call it last cache["foo"] cache.clear() self.assertEqual([("baz", "1"), ("biz", "3"), ("foo", "2")], cleanup_called) def test_cleanup_on_replace(self): """Replacing an object should cleanup the old value.""" cleanup_called = [] def cleanup_func(key, val): cleanup_called.append((key, val)) cache = lru_cache.LRUCache(max_cache=2) cache.add(1, 10, cleanup=cleanup_func) cache.add(2, 20, cleanup=cleanup_func) cache.add(2, 25, cleanup=cleanup_func) self.assertEqual([(2, 20)], cleanup_called) self.assertEqual(25, cache[2]) # Even __setitem__ should make sure cleanup() is called cache[2] = 26 self.assertEqual([(2, 20), (2, 25)], cleanup_called) def test_len(self): cache = lru_cache.LRUCache(max_cache=10, after_cleanup_count=10) cache[1] = 10 cache[2] = 20 cache[3] = 30 cache[4] = 40 self.assertEqual(4, len(cache)) cache[5] = 50 cache[6] = 60 cache[7] = 70 cache[8] = 80 self.assertEqual(8, len(cache)) cache[1] = 15 # replacement self.assertEqual(8, len(cache)) cache[9] = 90 cache[10] = 100 cache[11] = 110 # We hit the max self.assertEqual(10, len(cache)) self.assertEqual( - [11, 10, 9, 1, 8, 7, 6, 5, 4, 3], [n.key for n in cache._walk_lru()] + [11, 10, 9, 1, 8, 7, 6, 5, 4, 3], + [n.key for n in cache._walk_lru()], ) def test_cleanup_shrinks_to_after_clean_count(self): cache = lru_cache.LRUCache(max_cache=5, after_cleanup_count=3) cache.add(1, 10) cache.add(2, 20) cache.add(3, 25) cache.add(4, 30) cache.add(5, 35) self.assertEqual(5, len(cache)) # This will bump us over the max, which causes us to shrink down to # after_cleanup_cache size cache.add(6, 40) self.assertEqual(3, len(cache)) def test_after_cleanup_larger_than_max(self): cache = lru_cache.LRUCache(max_cache=5, after_cleanup_count=10) self.assertEqual(5, cache._after_cleanup_count) def test_after_cleanup_none(self): cache = lru_cache.LRUCache(max_cache=5, after_cleanup_count=None) # By default _after_cleanup_size is 
80% of the normal size self.assertEqual(4, cache._after_cleanup_count) def test_cleanup_2(self): cache = lru_cache.LRUCache(max_cache=5, after_cleanup_count=2) # Add these in order cache.add(1, 10) cache.add(2, 20) cache.add(3, 25) cache.add(4, 30) cache.add(5, 35) self.assertEqual(5, len(cache)) # Force a compaction cache.cleanup() self.assertEqual(2, len(cache)) def test_preserve_last_access_order(self): cache = lru_cache.LRUCache(max_cache=5) # Add these in order cache.add(1, 10) cache.add(2, 20) cache.add(3, 25) cache.add(4, 30) cache.add(5, 35) self.assertEqual([5, 4, 3, 2, 1], [n.key for n in cache._walk_lru()]) # Now access some randomly cache[2] cache[5] cache[3] cache[2] self.assertEqual([2, 3, 5, 4, 1], [n.key for n in cache._walk_lru()]) def test_get(self): cache = lru_cache.LRUCache(max_cache=5) cache.add(1, 10) cache.add(2, 20) self.assertEqual(20, cache.get(2)) self.assertEqual(None, cache.get(3)) obj = object() self.assertTrue(obj is cache.get(3, obj)) self.assertEqual([2, 1], [n.key for n in cache._walk_lru()]) self.assertEqual(10, cache.get(1)) self.assertEqual([1, 2], [n.key for n in cache._walk_lru()]) def test_keys(self): cache = lru_cache.LRUCache(max_cache=5, after_cleanup_count=5) cache[1] = 2 cache[2] = 3 cache[3] = 4 self.assertEqual([1, 2, 3], sorted(cache.keys())) cache[4] = 5 cache[5] = 6 cache[6] = 7 self.assertEqual([2, 3, 4, 5, 6], sorted(cache.keys())) def test_resize_smaller(self): cache = lru_cache.LRUCache(max_cache=5, after_cleanup_count=4) cache[1] = 2 cache[2] = 3 cache[3] = 4 cache[4] = 5 cache[5] = 6 self.assertEqual([1, 2, 3, 4, 5], sorted(cache.keys())) cache[6] = 7 self.assertEqual([3, 4, 5, 6], sorted(cache.keys())) # Now resize to something smaller, which triggers a cleanup cache.resize(max_cache=3, after_cleanup_count=2) self.assertEqual([5, 6], sorted(cache.keys())) # Adding something will use the new size cache[7] = 8 self.assertEqual([5, 6, 7], sorted(cache.keys())) cache[8] = 9 self.assertEqual([7, 8], sorted(cache.keys())) def test_resize_larger(self): cache = lru_cache.LRUCache(max_cache=5, after_cleanup_count=4) cache[1] = 2 cache[2] = 3 cache[3] = 4 cache[4] = 5 cache[5] = 6 self.assertEqual([1, 2, 3, 4, 5], sorted(cache.keys())) cache[6] = 7 self.assertEqual([3, 4, 5, 6], sorted(cache.keys())) cache.resize(max_cache=8, after_cleanup_count=6) self.assertEqual([3, 4, 5, 6], sorted(cache.keys())) cache[7] = 8 cache[8] = 9 cache[9] = 10 cache[10] = 11 self.assertEqual([3, 4, 5, 6, 7, 8, 9, 10], sorted(cache.keys())) cache[11] = 12 # triggers cleanup back to new after_cleanup_count self.assertEqual([6, 7, 8, 9, 10, 11], sorted(cache.keys())) class TestLRUSizeCache(TestCase): def test_basic_init(self): cache = lru_cache.LRUSizeCache() self.assertEqual(2048, cache._max_cache) self.assertEqual(int(cache._max_size * 0.8), cache._after_cleanup_size) self.assertEqual(0, cache._value_size) def test_add__null_key(self): cache = lru_cache.LRUSizeCache() self.assertRaises(ValueError, cache.add, lru_cache._null_key, 1) def test_add_tracks_size(self): cache = lru_cache.LRUSizeCache() self.assertEqual(0, cache._value_size) cache.add("my key", "my value text") self.assertEqual(13, cache._value_size) def test_remove_tracks_size(self): cache = lru_cache.LRUSizeCache() self.assertEqual(0, cache._value_size) cache.add("my key", "my value text") self.assertEqual(13, cache._value_size) node = cache._cache["my key"] cache._remove_node(node) self.assertEqual(0, cache._value_size) def test_no_add_over_size(self): """Adding a large value may not be cached at 
all.""" cache = lru_cache.LRUSizeCache(max_size=10, after_cleanup_size=5) self.assertEqual(0, cache._value_size) self.assertEqual({}, cache.items()) cache.add("test", "key") self.assertEqual(3, cache._value_size) self.assertEqual({"test": "key"}, cache.items()) cache.add("test2", "key that is too big") self.assertEqual(3, cache._value_size) self.assertEqual({"test": "key"}, cache.items()) # If we would add a key, only to cleanup and remove all cached entries, # then obviously that value should not be stored cache.add("test3", "bigkey") self.assertEqual(3, cache._value_size) self.assertEqual({"test": "key"}, cache.items()) cache.add("test4", "bikey") self.assertEqual(3, cache._value_size) self.assertEqual({"test": "key"}, cache.items()) def test_no_add_over_size_cleanup(self): """If a large value is not cached, we will call cleanup right away.""" cleanup_calls = [] def cleanup(key, value): cleanup_calls.append((key, value)) cache = lru_cache.LRUSizeCache(max_size=10, after_cleanup_size=5) self.assertEqual(0, cache._value_size) self.assertEqual({}, cache.items()) cache.add("test", "key that is too big", cleanup=cleanup) # key was not added self.assertEqual(0, cache._value_size) self.assertEqual({}, cache.items()) # and cleanup was called self.assertEqual([("test", "key that is too big")], cleanup_calls) def test_adding_clears_cache_based_on_size(self): """The cache is cleared in LRU order until small enough""" cache = lru_cache.LRUSizeCache(max_size=20) cache.add("key1", "value") # 5 chars cache.add("key2", "value2") # 6 chars cache.add("key3", "value23") # 7 chars self.assertEqual(5 + 6 + 7, cache._value_size) cache["key2"] # reference key2 so it gets a newer reference time cache.add("key4", "value234") # 8 chars, over limit # We have to remove 2 keys to get back under limit self.assertEqual(6 + 8, cache._value_size) self.assertEqual({"key2": "value2", "key4": "value234"}, cache.items()) def test_adding_clears_to_after_cleanup_size(self): cache = lru_cache.LRUSizeCache(max_size=20, after_cleanup_size=10) cache.add("key1", "value") # 5 chars cache.add("key2", "value2") # 6 chars cache.add("key3", "value23") # 7 chars self.assertEqual(5 + 6 + 7, cache._value_size) cache["key2"] # reference key2 so it gets a newer reference time cache.add("key4", "value234") # 8 chars, over limit # We have to remove 3 keys to get back under limit self.assertEqual(8, cache._value_size) self.assertEqual({"key4": "value234"}, cache.items()) def test_custom_sizes(self): def size_of_list(lst): return sum(len(x) for x in lst) cache = lru_cache.LRUSizeCache( max_size=20, after_cleanup_size=10, compute_size=size_of_list ) cache.add("key1", ["val", "ue"]) # 5 chars cache.add("key2", ["val", "ue2"]) # 6 chars cache.add("key3", ["val", "ue23"]) # 7 chars self.assertEqual(5 + 6 + 7, cache._value_size) cache["key2"] # reference key2 so it gets a newer reference time cache.add("key4", ["value", "234"]) # 8 chars, over limit # We have to remove 3 keys to get back under limit self.assertEqual(8, cache._value_size) self.assertEqual({"key4": ["value", "234"]}, cache.items()) def test_cleanup(self): cache = lru_cache.LRUSizeCache(max_size=20, after_cleanup_size=10) # Add these in order cache.add("key1", "value") # 5 chars cache.add("key2", "value2") # 6 chars cache.add("key3", "value23") # 7 chars self.assertEqual(5 + 6 + 7, cache._value_size) cache.cleanup() # Only the most recent fits after cleaning up self.assertEqual(7, cache._value_size) def test_keys(self): cache = lru_cache.LRUSizeCache(max_size=10) cache[1] = "a" 
cache[2] = "b" cache[3] = "cdef" self.assertEqual([1, 2, 3], sorted(cache.keys())) def test_resize_smaller(self): cache = lru_cache.LRUSizeCache(max_size=10, after_cleanup_size=9) cache[1] = "abc" cache[2] = "def" cache[3] = "ghi" cache[4] = "jkl" # Triggers a cleanup self.assertEqual([2, 3, 4], sorted(cache.keys())) # Resize should also cleanup again cache.resize(max_size=6, after_cleanup_size=4) self.assertEqual([4], sorted(cache.keys())) # Adding should use the new max size cache[5] = "mno" self.assertEqual([4, 5], sorted(cache.keys())) cache[6] = "pqr" self.assertEqual([6], sorted(cache.keys())) def test_resize_larger(self): cache = lru_cache.LRUSizeCache(max_size=10, after_cleanup_size=9) cache[1] = "abc" cache[2] = "def" cache[3] = "ghi" cache[4] = "jkl" # Triggers a cleanup self.assertEqual([2, 3, 4], sorted(cache.keys())) cache.resize(max_size=15, after_cleanup_size=12) self.assertEqual([2, 3, 4], sorted(cache.keys())) cache[5] = "mno" cache[6] = "pqr" self.assertEqual([2, 3, 4, 5, 6], sorted(cache.keys())) cache[7] = "stu" self.assertEqual([4, 5, 6, 7], sorted(cache.keys())) diff --git a/dulwich/tests/test_mailmap.py b/dulwich/tests/test_mailmap.py index 4b325b2a..bd44689b 100644 --- a/dulwich/tests/test_mailmap.py +++ b/dulwich/tests/test_mailmap.py @@ -1,89 +1,101 @@ # test_mailmap.py -- Tests for dulwich.mailmap # Copyright (C) 2018 Jelmer Vernooij # # Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU # General Public License as public by the Free Software Foundation; version 2.0 # or (at your option) any later version. You can redistribute it and/or # modify it under the terms of either of these two licenses. # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # You should have received a copy of the licenses; if not, see # for a copy of the GNU General Public License # and for a copy of the Apache # License, Version 2.0. # """Tests for dulwich.mailmap.""" from io import BytesIO from unittest import TestCase from dulwich.mailmap import Mailmap, read_mailmap class ReadMailmapTests(TestCase): def test_read(self): b = BytesIO( b"""\ Jane Doe Joe R. Developer # A comment # Comment Some Dude nick1 Other Author nick2 Other Author Santa Claus """ ) self.assertEqual( [ ((b"Jane Doe", b"jane@desktop.(none)"), None), ((b"Joe R. Developer", b"joe@example.com"), None), ((None, b"cto@company.xx"), (None, b"cto@coompany.xx")), - ((b"Some Dude", b"some@dude.xx"), (b"nick1", b"bugs@company.xx")), - ((b"Other Author", b"other@author.xx"), (b"nick2", b"bugs@company.xx")), - ((b"Other Author", b"other@author.xx"), (None, b"nick2@company.xx")), + ( + (b"Some Dude", b"some@dude.xx"), + (b"nick1", b"bugs@company.xx"), + ), + ( + (b"Other Author", b"other@author.xx"), + (b"nick2", b"bugs@company.xx"), + ), + ( + (b"Other Author", b"other@author.xx"), + (None, b"nick2@company.xx"), + ), ( (b"Santa Claus", b"santa.claus@northpole.xx"), (None, b"me@company.xx"), ), ], list(read_mailmap(b)), ) class MailmapTests(TestCase): def test_lookup(self): m = Mailmap() m.add_entry((b"Jane Doe", b"jane@desktop.(none)"), (None, None)) m.add_entry((b"Joe R. 
Developer", b"joe@example.com"), None) m.add_entry((None, b"cto@company.xx"), (None, b"cto@coompany.xx")) m.add_entry((b"Some Dude", b"some@dude.xx"), (b"nick1", b"bugs@company.xx")) m.add_entry( - (b"Other Author", b"other@author.xx"), (b"nick2", b"bugs@company.xx") + (b"Other Author", b"other@author.xx"), + (b"nick2", b"bugs@company.xx"), ) m.add_entry((b"Other Author", b"other@author.xx"), (None, b"nick2@company.xx")) m.add_entry( - (b"Santa Claus", b"santa.claus@northpole.xx"), (None, b"me@company.xx") + (b"Santa Claus", b"santa.claus@northpole.xx"), + (None, b"me@company.xx"), ) self.assertEqual( b"Jane Doe ", m.lookup(b"Jane Doe "), ) self.assertEqual( - b"Jane Doe ", m.lookup(b"Jane Doe ") + b"Jane Doe ", + m.lookup(b"Jane Doe "), ) self.assertEqual( b"Jane Doe ", m.lookup(b"Jane D. "), ) self.assertEqual( b"Some Dude ", m.lookup(b"nick1 ") ) self.assertEqual(b"CTO ", m.lookup(b"CTO ")) diff --git a/dulwich/tests/test_missing_obj_finder.py b/dulwich/tests/test_missing_obj_finder.py index 31fc7e1c..742ce9ac 100644 --- a/dulwich/tests/test_missing_obj_finder.py +++ b/dulwich/tests/test_missing_obj_finder.py @@ -1,317 +1,318 @@ # test_missing_obj_finder.py -- tests for MissingObjectFinder # Copyright (C) 2012 syntevo GmbH # # Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU # General Public License as public by the Free Software Foundation; version 2.0 # or (at your option) any later version. You can redistribute it and/or # modify it under the terms of either of these two licenses. # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # You should have received a copy of the licenses; if not, see # for a copy of the GNU General Public License # and for a copy of the Apache # License, Version 2.0. # from dulwich.object_store import ( MemoryObjectStore, ) from dulwich.objects import ( Blob, ) from dulwich.tests import TestCase from dulwich.tests.utils import ( make_object, make_tag, build_commit_graph, ) class MissingObjectFinderTest(TestCase): def setUp(self): super(MissingObjectFinderTest, self).setUp() self.store = MemoryObjectStore() self.commits = [] def cmt(self, n): return self.commits[n - 1] def assertMissingMatch(self, haves, wants, expected): for sha, path in self.store.find_missing_objects(haves, wants, set()): self.assertTrue( - sha in expected, "(%s,%s) erroneously reported as missing" % (sha, path) + sha in expected, + "(%s,%s) erroneously reported as missing" % (sha, path), ) expected.remove(sha) self.assertEqual( len(expected), 0, "some objects are not reported as missing: %s" % (expected,), ) class MOFLinearRepoTest(MissingObjectFinderTest): def setUp(self): super(MOFLinearRepoTest, self).setUp() # present in 1, removed in 3 f1_1 = make_object(Blob, data=b"f1") # present in all revisions, changed in 2 and 3 f2_1 = make_object(Blob, data=b"f2") f2_2 = make_object(Blob, data=b"f2-changed") f2_3 = make_object(Blob, data=b"f2-changed-again") # added in 2, left unmodified in 3 f3_2 = make_object(Blob, data=b"f3") commit_spec = [[1], [2, 1], [3, 2]] trees = { 1: [(b"f1", f1_1), (b"f2", f2_1)], 2: [(b"f1", f1_1), (b"f2", f2_2), (b"f3", f3_2)], 3: [(b"f2", f2_3), (b"f3", f3_2)], } # commit 1: f1 and f2 # commit 2: f3 added, f2 changed. 
diff --git a/dulwich/tests/test_missing_obj_finder.py b/dulwich/tests/test_missing_obj_finder.py
index 31fc7e1c..742ce9ac 100644
--- a/dulwich/tests/test_missing_obj_finder.py
+++ b/dulwich/tests/test_missing_obj_finder.py
@@ -1,317 +1,318 @@
# test_missing_obj_finder.py -- tests for MissingObjectFinder
# Copyright (C) 2012 syntevo GmbH
#
# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
# General Public License as published by the Free Software Foundation; version 2.0
# or (at your option) any later version. You can redistribute it and/or
# modify it under the terms of either of these two licenses.
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# You should have received a copy of the licenses; if not, see
# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
# License, Version 2.0.
#

from dulwich.object_store import (
    MemoryObjectStore,
)
from dulwich.objects import (
    Blob,
)
from dulwich.tests import TestCase
from dulwich.tests.utils import (
    make_object,
    make_tag,
    build_commit_graph,
)


class MissingObjectFinderTest(TestCase):
    def setUp(self):
        super(MissingObjectFinderTest, self).setUp()
        self.store = MemoryObjectStore()
        self.commits = []

    def cmt(self, n):
        return self.commits[n - 1]

    def assertMissingMatch(self, haves, wants, expected):
        for sha, path in self.store.find_missing_objects(haves, wants, set()):
            self.assertTrue(
-                sha in expected, "(%s,%s) erroneously reported as missing" % (sha, path)
+                sha in expected,
+                "(%s,%s) erroneously reported as missing" % (sha, path),
            )
            expected.remove(sha)

        self.assertEqual(
            len(expected),
            0,
            "some objects are not reported as missing: %s" % (expected,),
        )


class MOFLinearRepoTest(MissingObjectFinderTest):
    def setUp(self):
        super(MOFLinearRepoTest, self).setUp()
        # present in 1, removed in 3
        f1_1 = make_object(Blob, data=b"f1")
        # present in all revisions, changed in 2 and 3
        f2_1 = make_object(Blob, data=b"f2")
        f2_2 = make_object(Blob, data=b"f2-changed")
        f2_3 = make_object(Blob, data=b"f2-changed-again")
        # added in 2, left unmodified in 3
        f3_2 = make_object(Blob, data=b"f3")

        commit_spec = [[1], [2, 1], [3, 2]]
        trees = {
            1: [(b"f1", f1_1), (b"f2", f2_1)],
            2: [(b"f1", f1_1), (b"f2", f2_2), (b"f3", f3_2)],
            3: [(b"f2", f2_3), (b"f3", f3_2)],
        }
        # commit 1: f1 and f2
        # commit 2: f3 added, f2 changed. Missing shall report commit id and a
        # tree referenced by commit
        # commit 3: f1 removed, f2 changed. Commit sha and root tree sha shall
        # be reported as modified
        self.commits = build_commit_graph(self.store, commit_spec, trees)

        self.missing_1_2 = [self.cmt(2).id, self.cmt(2).tree, f2_2.id, f3_2.id]
        self.missing_2_3 = [self.cmt(3).id, self.cmt(3).tree, f2_3.id]
        self.missing_1_3 = [
            self.cmt(2).id,
            self.cmt(3).id,
            self.cmt(2).tree,
            self.cmt(3).tree,
            f2_2.id,
            f3_2.id,
            f2_3.id,
        ]

    def test_1_to_2(self):
        self.assertMissingMatch([self.cmt(1).id], [self.cmt(2).id], self.missing_1_2)

    def test_2_to_3(self):
        self.assertMissingMatch([self.cmt(2).id], [self.cmt(3).id], self.missing_2_3)

    def test_1_to_3(self):
        self.assertMissingMatch([self.cmt(1).id], [self.cmt(3).id], self.missing_1_3)

    def test_bogus_haves(self):
        """Ensure non-existent SHA in haves are tolerated"""
        bogus_sha = self.cmt(2).id[::-1]
        haves = [self.cmt(1).id, bogus_sha]
        wants = [self.cmt(3).id]
        self.assertMissingMatch(haves, wants, self.missing_1_3)

    def test_bogus_wants_failure(self):
        """Ensure non-existent SHA in wants are not tolerated"""
        bogus_sha = self.cmt(2).id[::-1]
        haves = [self.cmt(1).id]
        wants = [self.cmt(3).id, bogus_sha]
        self.assertRaises(
            KeyError, self.store.find_missing_objects, haves, wants, set()
        )

    def test_no_changes(self):
        self.assertMissingMatch([self.cmt(3).id], [self.cmt(3).id], [])


class MOFMergeForkRepoTest(MissingObjectFinderTest):
    # 1 --- 2 --- 4 --- 6 --- 7
    #          \        /
    #           3  ---
    #            \
    #             5

    def setUp(self):
        super(MOFMergeForkRepoTest, self).setUp()
        f1_1 = make_object(Blob, data=b"f1")
        f1_2 = make_object(Blob, data=b"f1-2")
        f1_4 = make_object(Blob, data=b"f1-4")
        f1_7 = make_object(Blob, data=b"f1-2")  # same data as in rev 2
        f2_1 = make_object(Blob, data=b"f2")
        f2_3 = make_object(Blob, data=b"f2-3")
        f3_3 = make_object(Blob, data=b"f3")
        f3_5 = make_object(Blob, data=b"f3-5")
        commit_spec = [[1], [2, 1], [3, 2], [4, 2], [5, 3], [6, 3, 4], [7, 6]]
        trees = {
            1: [(b"f1", f1_1), (b"f2", f2_1)],
            2: [(b"f1", f1_2), (b"f2", f2_1)],  # f1 changed
            # f3 added, f2 changed
            3: [(b"f1", f1_2), (b"f2", f2_3), (b"f3", f3_3)],
            4: [(b"f1", f1_4), (b"f2", f2_1)],  # f1 changed
            5: [(b"f1", f1_2), (b"f3", f3_5)],  # f2 removed, f3 changed
            # merged 3 and 4
            6: [(b"f1", f1_4), (b"f2", f2_3), (b"f3", f3_3)],
            # f1 changed to match rev2. f3 removed
            7: [(b"f1", f1_7), (b"f2", f2_3)],
        }
        self.commits = build_commit_graph(self.store, commit_spec, trees)

        self.f1_2_id = f1_2.id
        self.f1_4_id = f1_4.id
        self.f1_7_id = f1_7.id
        self.f2_3_id = f2_3.id
        self.f3_3_id = f3_3.id

        self.assertEqual(f1_2.id, f1_7.id, "[sanity]")

    def test_have6_want7(self):
        # have 6, want 7. Ideally, shall not report f1_7 as it's the same as
        # f1_2, however, to do so, MissingObjectFinder shall not record trees
        # of common commits only, but also all parent trees and tree items,
        # which is an overkill (i.e. in sha_done it records f1_4 as known, and
        # doesn't record f1_2 was known prior to that, hence can't detect f1_7
        # is in fact f1_2 and shall not be reported)
        self.assertMissingMatch(
            [self.cmt(6).id],
            [self.cmt(7).id],
            [self.cmt(7).id, self.cmt(7).tree, self.f1_7_id],
        )

    def test_have4_want7(self):
        # have 4, want 7. Shall not include rev5 as it is not in the tree
        # between 4 and 7 (well, it is, but its SHA's are irrelevant for 4..7
        # commit hierarchy)
        self.assertMissingMatch(
            [self.cmt(4).id],
            [self.cmt(7).id],
            [
                self.cmt(7).id,
                self.cmt(6).id,
                self.cmt(3).id,
                self.cmt(7).tree,
                self.cmt(6).tree,
                self.cmt(3).tree,
                self.f2_3_id,
                self.f3_3_id,
            ],
        )

    def test_have1_want6(self):
        # have 1, want 6. Shall not include rev5
        self.assertMissingMatch(
            [self.cmt(1).id],
            [self.cmt(6).id],
            [
                self.cmt(6).id,
                self.cmt(4).id,
                self.cmt(3).id,
                self.cmt(2).id,
                self.cmt(6).tree,
                self.cmt(4).tree,
                self.cmt(3).tree,
                self.cmt(2).tree,
                self.f1_2_id,
                self.f1_4_id,
                self.f2_3_id,
                self.f3_3_id,
            ],
        )

    def test_have3_want6(self):
        # have 3, want 7. Shall not report rev2 and its tree, because
        # haves(3) means has parents, i.e. rev2, too
        # BUT shall report any changes descending rev2 (excluding rev3)
        # Shall NOT report f1_7 as it's technically == f1_2
        self.assertMissingMatch(
            [self.cmt(3).id],
            [self.cmt(7).id],
            [
                self.cmt(7).id,
                self.cmt(6).id,
                self.cmt(4).id,
                self.cmt(7).tree,
                self.cmt(6).tree,
                self.cmt(4).tree,
                self.f1_4_id,
            ],
        )

    def test_have5_want7(self):
        # have 5, want 7. Common parent is rev2, hence children of rev2 from
        # a descent line other than rev5 shall be reported
        # expects f1_4 from rev6. f3_5 is known in rev5;
        # f1_7 shall be the same as f1_2 (known, too)
        self.assertMissingMatch(
            [self.cmt(5).id],
            [self.cmt(7).id],
            [
                self.cmt(7).id,
                self.cmt(6).id,
                self.cmt(4).id,
                self.cmt(7).tree,
                self.cmt(6).tree,
                self.cmt(4).tree,
                self.f1_4_id,
            ],
        )


class MOFTagsTest(MissingObjectFinderTest):
    def setUp(self):
        super(MOFTagsTest, self).setUp()
        f1_1 = make_object(Blob, data=b"f1")
        commit_spec = [[1]]
        trees = {1: [(b"f1", f1_1)]}
        self.commits = build_commit_graph(self.store, commit_spec, trees)

        self._normal_tag = make_tag(self.cmt(1))
        self.store.add_object(self._normal_tag)

        self._tag_of_tag = make_tag(self._normal_tag)
        self.store.add_object(self._tag_of_tag)

        self._tag_of_tree = make_tag(self.store[self.cmt(1).tree])
        self.store.add_object(self._tag_of_tree)

        self._tag_of_blob = make_tag(f1_1)
        self.store.add_object(self._tag_of_blob)

        self._tag_of_tag_of_blob = make_tag(self._tag_of_blob)
        self.store.add_object(self._tag_of_tag_of_blob)

        self.f1_1_id = f1_1.id

    def test_tagged_commit(self):
        # The user already has the tagged commit, all they want is the tag,
        # so send them only the tag object.
        self.assertMissingMatch(
            [self.cmt(1).id], [self._normal_tag.id], [self._normal_tag.id]
        )

    # The remaining cases are unusual, but do happen in the wild.
    def test_tagged_tag(self):
        # User already has tagged tag, send only tag of tag
        self.assertMissingMatch(
            [self._normal_tag.id], [self._tag_of_tag.id], [self._tag_of_tag.id]
        )
        # User needs both tags, but already has commit
        self.assertMissingMatch(
            [self.cmt(1).id],
            [self._tag_of_tag.id],
            [self._normal_tag.id, self._tag_of_tag.id],
        )

    def test_tagged_tree(self):
        self.assertMissingMatch(
            [],
            [self._tag_of_tree.id],
            [self._tag_of_tree.id, self.cmt(1).tree, self.f1_1_id],
        )

    def test_tagged_blob(self):
        self.assertMissingMatch(
            [], [self._tag_of_blob.id], [self._tag_of_blob.id, self.f1_1_id]
        )

    def test_tagged_tagged_blob(self):
        self.assertMissingMatch(
            [],
            [self._tag_of_tag_of_blob.id],
            [self._tag_of_tag_of_blob.id, self._tag_of_blob.id, self.f1_1_id],
        )
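# --- Aside (not part of the upstream patch): the find_missing_objects() call
# --- under test, sketched with the same helpers the tests use. Given the
# --- client's "haves" and "wants", the store yields each (sha, path) pair the
# --- client still lacks.
from dulwich.object_store import MemoryObjectStore
from dulwich.objects import Blob
from dulwich.tests.utils import build_commit_graph, make_object

_store = MemoryObjectStore()
_f1 = make_object(Blob, data=b"f1")
# Commit 2 descends from commit 1; both trees carry the same blob at b"f1".
_commits = build_commit_graph(
    _store, [[1], [2, 1]], {1: [(b"f1", _f1)], 2: [(b"f1", _f1)]}
)
_missing = {
    sha
    for sha, _path in _store.find_missing_objects(
        [_commits[0].id], [_commits[1].id], set()
    )
}
# Only commit 2 itself is new; its tree and blob are shared with commit 1.
assert _missing == {_commits[1].id}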
diff --git a/dulwich/tests/test_object_store.py b/dulwich/tests/test_object_store.py
index 7a6de68f..0b11c023 100644
--- a/dulwich/tests/test_object_store.py
+++ b/dulwich/tests/test_object_store.py
@@ -1,719 +1,735 @@
# test_object_store.py -- tests for object_store.py
# Copyright (C) 2008 Jelmer Vernooij <jelmer@samba.org>
#
# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
# General Public License as published by the Free Software Foundation; version 2.0
# or (at your option) any later version. You can redistribute it and/or
# modify it under the terms of either of these two licenses.
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# You should have received a copy of the licenses; if not, see
# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
# License, Version 2.0.
#

"""Tests for the object store interface."""

from contextlib import closing
from io import BytesIO
import os
import shutil
import stat
import tempfile

from dulwich.index import (
    commit_tree,
)
from dulwich.errors import (
    NotTreeError,
)
from dulwich.objects import (
    sha_to_hex,
    Blob,
    Tree,
    TreeEntry,
    EmptyFileException,
)
from dulwich.object_store import (
    DiskObjectStore,
    MemoryObjectStore,
    OverlayObjectStore,
    ObjectStoreGraphWalker,
    commit_tree_changes,
    read_packs_file,
    tree_lookup_path,
)
from dulwich.pack import (
    REF_DELTA,
    write_pack_objects,
)
from dulwich.tests import (
    TestCase,
)
from dulwich.tests.utils import (
    make_object,
    make_tag,
    build_pack,
)


testobject = make_object(Blob, data=b"yummy data")


class ObjectStoreTests(object):
    def test_determine_wants_all(self):
        self.assertEqual(
-            [b"1" * 40], self.store.determine_wants_all({b"refs/heads/foo": b"1" * 40})
+            [b"1" * 40],
+            self.store.determine_wants_all({b"refs/heads/foo": b"1" * 40}),
        )

    def test_determine_wants_all_zero(self):
        self.assertEqual(
            [], self.store.determine_wants_all({b"refs/heads/foo": b"0" * 40})
        )

    def test_iter(self):
        self.assertEqual([], list(self.store))

    def test_get_nonexistant(self):
        self.assertRaises(KeyError, lambda: self.store[b"a" * 40])

    def test_contains_nonexistant(self):
        self.assertFalse((b"a" * 40) in self.store)

    def test_add_objects_empty(self):
        self.store.add_objects([])

    def test_add_commit(self):
        # TODO: Argh, no way to construct Git commit objects without
        # access to a serialized form.
        self.store.add_objects([])

    def test_store_resilience(self):
        """Test if updating an existing stored object doesn't erase the
        object from the store.
        """
        test_object = make_object(Blob, data=b"data")

        self.store.add_object(test_object)
        test_object_id = test_object.id
        test_object.data = test_object.data + b"update"
        stored_test_object = self.store[test_object_id]

        self.assertNotEqual(test_object.id, stored_test_object.id)
        self.assertEqual(stored_test_object.id, test_object_id)

    def test_add_object(self):
        self.store.add_object(testobject)
        self.assertEqual(set([testobject.id]), set(self.store))
        self.assertTrue(testobject.id in self.store)
        r = self.store[testobject.id]
        self.assertEqual(r, testobject)

    def test_add_objects(self):
        data = [(testobject, "mypath")]
        self.store.add_objects(data)
        self.assertEqual(set([testobject.id]), set(self.store))
        self.assertTrue(testobject.id in self.store)
        r = self.store[testobject.id]
        self.assertEqual(r, testobject)

    def test_tree_changes(self):
        blob_a1 = make_object(Blob, data=b"a1")
        blob_a2 = make_object(Blob, data=b"a2")
        blob_b = make_object(Blob, data=b"b")
        for blob in [blob_a1, blob_a2, blob_b]:
            self.store.add_object(blob)

        blobs_1 = [(b"a", blob_a1.id, 0o100644), (b"b", blob_b.id, 0o100644)]
        tree1_id = commit_tree(self.store, blobs_1)
        blobs_2 = [(b"a", blob_a2.id, 0o100644), (b"b", blob_b.id, 0o100644)]
        tree2_id = commit_tree(self.store, blobs_2)
-        change_a = ((b"a", b"a"), (0o100644, 0o100644), (blob_a1.id, blob_a2.id))
+        change_a = (
+            (b"a", b"a"),
+            (0o100644, 0o100644),
+            (blob_a1.id, blob_a2.id),
+        )
        self.assertEqual([change_a], list(self.store.tree_changes(tree1_id, tree2_id)))
        self.assertEqual(
-            [change_a, ((b"b", b"b"), (0o100644, 0o100644), (blob_b.id, blob_b.id))],
+            [
+                change_a,
+                ((b"b", b"b"), (0o100644, 0o100644), (blob_b.id, blob_b.id)),
+            ],
            list(self.store.tree_changes(tree1_id, tree2_id, want_unchanged=True)),
        )

    def test_iter_tree_contents(self):
        blob_a = make_object(Blob, data=b"a")
        blob_b = make_object(Blob, data=b"b")
        blob_c = make_object(Blob, data=b"c")
        for blob in [blob_a, blob_b, blob_c]:
            self.store.add_object(blob)

        blobs = [
            (b"a", blob_a.id, 0o100644),
            (b"ad/b", blob_b.id, 0o100644),
            (b"ad/bd/c", blob_c.id, 0o100755),
            (b"ad/c", blob_c.id, 0o100644),
            (b"c", blob_c.id, 0o100644),
        ]
        tree_id = commit_tree(self.store, blobs)
        self.assertEqual(
            [TreeEntry(p, m, h) for (p, h, m) in blobs],
            list(self.store.iter_tree_contents(tree_id)),
        )

    def test_iter_tree_contents_include_trees(self):
        blob_a = make_object(Blob, data=b"a")
        blob_b = make_object(Blob, data=b"b")
        blob_c = make_object(Blob, data=b"c")
        for blob in [blob_a, blob_b, blob_c]:
            self.store.add_object(blob)

        blobs = [
            (b"a", blob_a.id, 0o100644),
            (b"ad/b", blob_b.id, 0o100644),
            (b"ad/bd/c", blob_c.id, 0o100755),
        ]
        tree_id = commit_tree(self.store, blobs)
        tree = self.store[tree_id]
        tree_ad = self.store[tree[b"ad"][1]]
        tree_bd = self.store[tree_ad[b"bd"][1]]

        expected = [
            TreeEntry(b"", 0o040000, tree_id),
            TreeEntry(b"a", 0o100644, blob_a.id),
            TreeEntry(b"ad", 0o040000, tree_ad.id),
            TreeEntry(b"ad/b", 0o100644, blob_b.id),
            TreeEntry(b"ad/bd", 0o040000, tree_bd.id),
            TreeEntry(b"ad/bd/c", 0o100755, blob_c.id),
        ]
        actual = self.store.iter_tree_contents(tree_id, include_trees=True)
        self.assertEqual(expected, list(actual))

    def make_tag(self, name, obj):
        tag = make_tag(obj, name=name)
        self.store.add_object(tag)
        return tag

    def test_peel_sha(self):
        self.store.add_object(testobject)
        tag1 = self.make_tag(b"1", testobject)
        tag2 = self.make_tag(b"2", testobject)
        tag3 = self.make_tag(b"3", testobject)
        for obj in [testobject, tag1, tag2, tag3]:
            self.assertEqual(testobject, self.store.peel_sha(obj.id))

    def test_get_raw(self):
        self.store.add_object(testobject)
        self.assertEqual(
            (Blob.type_num, b"yummy data"), self.store.get_raw(testobject.id)
        )

    def test_close(self):
        # For now, just check that close doesn't barf.
        self.store.add_object(testobject)
        self.store.close()
class OverlayObjectStoreTests(ObjectStoreTests, TestCase):
    def setUp(self):
        TestCase.setUp(self)
        self.bases = [MemoryObjectStore(), MemoryObjectStore()]
        self.store = OverlayObjectStore(self.bases, self.bases[0])


class MemoryObjectStoreTests(ObjectStoreTests, TestCase):
    def setUp(self):
        TestCase.setUp(self)
        self.store = MemoryObjectStore()

    def test_add_pack(self):
        o = MemoryObjectStore()
        f, commit, abort = o.add_pack()
        try:
            b = make_object(Blob, data=b"more yummy data")
            write_pack_objects(f, [(b, None)])
        except BaseException:
            abort()
            raise
        else:
            commit()

    def test_add_pack_emtpy(self):
        o = MemoryObjectStore()
        f, commit, abort = o.add_pack()
        commit()

    def test_add_thin_pack(self):
        o = MemoryObjectStore()
        blob = make_object(Blob, data=b"yummy data")
        o.add_object(blob)

        f = BytesIO()
        entries = build_pack(
            f,
            [
                (REF_DELTA, (blob.id, b"more yummy data")),
            ],
            store=o,
        )

        o.add_thin_pack(f.read, None)
        packed_blob_sha = sha_to_hex(entries[0][3])
        self.assertEqual(
            (Blob.type_num, b"more yummy data"), o.get_raw(packed_blob_sha)
        )

    def test_add_thin_pack_empty(self):
        o = MemoryObjectStore()

        f = BytesIO()
        entries = build_pack(f, [], store=o)
        self.assertEqual([], entries)
        o.add_thin_pack(f.read, None)


class PackBasedObjectStoreTests(ObjectStoreTests):
    def tearDown(self):
        for pack in self.store.packs:
            pack.close()

    def test_empty_packs(self):
        self.assertEqual([], list(self.store.packs))

    def test_pack_loose_objects(self):
        b1 = make_object(Blob, data=b"yummy data")
        self.store.add_object(b1)
        b2 = make_object(Blob, data=b"more yummy data")
        self.store.add_object(b2)
        b3 = make_object(Blob, data=b"even more yummy data")
        b4 = make_object(Blob, data=b"and more yummy data")
        self.store.add_objects([(b3, None), (b4, None)])

        self.assertEqual({b1.id, b2.id, b3.id, b4.id}, set(self.store))
        self.assertEqual(1, len(self.store.packs))
        self.assertEqual(2, self.store.pack_loose_objects())
        self.assertNotEqual([], list(self.store.packs))
        self.assertEqual(0, self.store.pack_loose_objects())

    def test_repack(self):
        b1 = make_object(Blob, data=b"yummy data")
        self.store.add_object(b1)
        b2 = make_object(Blob, data=b"more yummy data")
        self.store.add_object(b2)
        b3 = make_object(Blob, data=b"even more yummy data")
        b4 = make_object(Blob, data=b"and more yummy data")
        self.store.add_objects([(b3, None), (b4, None)])
        b5 = make_object(Blob, data=b"and more data")
        b6 = make_object(Blob, data=b"and some more data")
        self.store.add_objects([(b5, None), (b6, None)])

        self.assertEqual({b1.id, b2.id, b3.id, b4.id, b5.id, b6.id}, set(self.store))
        self.assertEqual(2, len(self.store.packs))
        self.assertEqual(6, self.store.repack())
        self.assertEqual(1, len(self.store.packs))
        self.assertEqual(0, self.store.pack_loose_objects())

    def test_repack_existing(self):
        b1 = make_object(Blob, data=b"yummy data")
        self.store.add_object(b1)
        b2 = make_object(Blob, data=b"more yummy data")
        self.store.add_object(b2)
        self.store.add_objects([(b1, None), (b2, None)])
        self.store.add_objects([(b2, None)])

        self.assertEqual({b1.id, b2.id}, set(self.store))
        self.assertEqual(2, len(self.store.packs))
        self.assertEqual(2, self.store.repack())
        self.assertEqual(1, len(self.store.packs))
        self.assertEqual(0, self.store.pack_loose_objects())

        self.assertEqual({b1.id, b2.id}, set(self.store))
        self.assertEqual(1, len(self.store.packs))
        self.assertEqual(2, self.store.repack())
        self.assertEqual(1, len(self.store.packs))
        self.assertEqual(0, self.store.pack_loose_objects())
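# --- Aside (not part of the upstream patch): the loose-vs-packed lifecycle the
# --- tests above rely on, sketched against a throwaway on-disk store.
import shutil
import tempfile

from dulwich.object_store import DiskObjectStore
from dulwich.objects import Blob

_dir = tempfile.mkdtemp()
_store = DiskObjectStore.init(_dir)
try:
    _store.add_object(Blob.from_string(b"loose"))  # written as a loose object
    assert _store.pack_loose_objects() == 1        # one object moved into a pack
    assert _store.pack_loose_objects() == 0        # nothing loose remains
    assert _store.repack() == 1                    # all packs consolidated into one
finally:
    _store.close()
    shutil.rmtree(_dir)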
class DiskObjectStoreTests(PackBasedObjectStoreTests, TestCase):
    def setUp(self):
        TestCase.setUp(self)
        self.store_dir = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, self.store_dir)
        self.store = DiskObjectStore.init(self.store_dir)

    def tearDown(self):
        TestCase.tearDown(self)
        PackBasedObjectStoreTests.tearDown(self)

    def test_loose_compression_level(self):
        alternate_dir = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, alternate_dir)
        alternate_store = DiskObjectStore(alternate_dir, loose_compression_level=6)
        b2 = make_object(Blob, data=b"yummy data")
        alternate_store.add_object(b2)

    def test_alternates(self):
        alternate_dir = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, alternate_dir)
        alternate_store = DiskObjectStore(alternate_dir)
        b2 = make_object(Blob, data=b"yummy data")
        alternate_store.add_object(b2)
        store = DiskObjectStore(self.store_dir)
        self.assertRaises(KeyError, store.__getitem__, b2.id)
        store.add_alternate_path(alternate_dir)
        self.assertIn(b2.id, store)
        self.assertEqual(b2, store[b2.id])

    def test_read_alternate_paths(self):
        store = DiskObjectStore(self.store_dir)

        abs_path = os.path.abspath(os.path.normpath("/abspath"))
        # ensures in particular existence of the alternates file
        store.add_alternate_path(abs_path)
        self.assertEqual(set(store._read_alternate_paths()), {abs_path})

        store.add_alternate_path("relative-path")
        self.assertIn(
            os.path.join(store.path, "relative-path"),
            set(store._read_alternate_paths()),
        )

        # arguably, add_alternate_path() could strip comments.
        # Meanwhile it's more convenient to use it than to import INFODIR
        store.add_alternate_path("# comment")
        for alt_path in store._read_alternate_paths():
            self.assertNotIn("#", alt_path)

    def test_corrupted_object_raise_exception(self):
        """Corrupted sha1 disk file should raise specific exception"""
        self.store.add_object(testobject)
        self.assertEqual(
            (Blob.type_num, b"yummy data"), self.store.get_raw(testobject.id)
        )
        self.assertTrue(self.store.contains_loose(testobject.id))
        self.assertIsNotNone(self.store._get_loose_object(testobject.id))

        path = self.store._get_shafile_path(testobject.id)
        with open(path, "wb") as f:  # corrupt the file
            f.write(b"")

        expected_error_msg = "Corrupted empty file detected"
        try:
            self.store.contains_loose(testobject.id)
        except EmptyFileException as e:
            self.assertEqual(str(e), expected_error_msg)

        try:
            self.store._get_loose_object(testobject.id)
        except EmptyFileException as e:
            self.assertEqual(str(e), expected_error_msg)

        # this does not change iteration on loose objects though
        self.assertEqual([testobject.id], list(self.store._iter_loose_objects()))

    def test_tempfile_in_loose_store(self):
        self.store.add_object(testobject)
        self.assertEqual([testobject.id], list(self.store._iter_loose_objects()))

        # add temporary files to the loose store
        for i in range(256):
            dirname = os.path.join(self.store_dir, "%02x" % i)
            if not os.path.isdir(dirname):
                os.makedirs(dirname)
            fd, n = tempfile.mkstemp(prefix="tmp_obj_", dir=dirname)
            os.close(fd)

        self.assertEqual([testobject.id], list(self.store._iter_loose_objects()))

    def test_add_alternate_path(self):
        store = DiskObjectStore(self.store_dir)
        self.assertEqual([], list(store._read_alternate_paths()))
        store.add_alternate_path("/foo/path")
        self.assertEqual(["/foo/path"], list(store._read_alternate_paths()))
        store.add_alternate_path("/bar/path")
        self.assertEqual(
            ["/foo/path", "/bar/path"], list(store._read_alternate_paths())
        )

    def test_rel_alternative_path(self):
        alternate_dir = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, alternate_dir)
        alternate_store = DiskObjectStore(alternate_dir)
        b2 = make_object(Blob, data=b"yummy data")
        alternate_store.add_object(b2)
        store = DiskObjectStore(self.store_dir)
        self.assertRaises(KeyError, store.__getitem__, b2.id)
        store.add_alternate_path(os.path.relpath(alternate_dir, self.store_dir))
        self.assertEqual(list(alternate_store), list(store.alternates[0]))
        self.assertIn(b2.id, store)
        self.assertEqual(b2, store[b2.id])

    def test_pack_dir(self):
        o = DiskObjectStore(self.store_dir)
        self.assertEqual(os.path.join(self.store_dir, "pack"), o.pack_dir)

    def test_add_pack(self):
        o = DiskObjectStore(self.store_dir)
        f, commit, abort = o.add_pack()
        try:
            b = make_object(Blob, data=b"more yummy data")
            write_pack_objects(f, [(b, None)])
        except BaseException:
            abort()
            raise
        else:
            commit()

    def test_add_thin_pack(self):
        o = DiskObjectStore(self.store_dir)
        try:
            blob = make_object(Blob, data=b"yummy data")
            o.add_object(blob)

            f = BytesIO()
            entries = build_pack(
                f,
                [
                    (REF_DELTA, (blob.id, b"more yummy data")),
                ],
                store=o,
            )

            with o.add_thin_pack(f.read, None) as pack:
                packed_blob_sha = sha_to_hex(entries[0][3])
                pack.check_length_and_checksum()
                self.assertEqual(sorted([blob.id, packed_blob_sha]), list(pack))
                self.assertTrue(o.contains_packed(packed_blob_sha))
                self.assertTrue(o.contains_packed(blob.id))
                self.assertEqual(
-                    (Blob.type_num, b"more yummy data"), o.get_raw(packed_blob_sha)
+                    (Blob.type_num, b"more yummy data"),
+                    o.get_raw(packed_blob_sha),
                )
        finally:
            o.close()

    def test_add_thin_pack_empty(self):
        with closing(DiskObjectStore(self.store_dir)) as o:
            f = BytesIO()
            entries = build_pack(f, [], store=o)
            self.assertEqual([], entries)
            o.add_thin_pack(f.read, None)


class TreeLookupPathTests(TestCase):
    def setUp(self):
        TestCase.setUp(self)
        self.store = MemoryObjectStore()
        blob_a = make_object(Blob, data=b"a")
        blob_b = make_object(Blob, data=b"b")
        blob_c = make_object(Blob, data=b"c")
        for blob in [blob_a, blob_b, blob_c]:
            self.store.add_object(blob)

        blobs = [
            (b"a", blob_a.id, 0o100644),
            (b"ad/b", blob_b.id, 0o100644),
            (b"ad/bd/c", blob_c.id, 0o100755),
            (b"ad/c", blob_c.id, 0o100644),
            (b"c", blob_c.id, 0o100644),
        ]
        self.tree_id = commit_tree(self.store, blobs)

    def get_object(self, sha):
        return self.store[sha]

    def test_lookup_blob(self):
        o_id = tree_lookup_path(self.get_object, self.tree_id, b"a")[1]
        self.assertTrue(isinstance(self.store[o_id], Blob))

    def test_lookup_tree(self):
        o_id = tree_lookup_path(self.get_object, self.tree_id, b"ad")[1]
        self.assertTrue(isinstance(self.store[o_id], Tree))
        o_id = tree_lookup_path(self.get_object, self.tree_id, b"ad/bd")[1]
        self.assertTrue(isinstance(self.store[o_id], Tree))
        o_id = tree_lookup_path(self.get_object, self.tree_id, b"ad/bd/")[1]
        self.assertTrue(isinstance(self.store[o_id], Tree))

    def test_lookup_nonexistent(self):
        self.assertRaises(
            KeyError, tree_lookup_path, self.get_object, self.tree_id, b"j"
        )

    def test_lookup_not_tree(self):
        self.assertRaises(
-            NotTreeError, tree_lookup_path, self.get_object, self.tree_id, b"ad/b/j"
+            NotTreeError,
+            tree_lookup_path,
+            self.get_object,
+            self.tree_id,
+            b"ad/b/j",
        )


class ObjectStoreGraphWalkerTests(TestCase):
    def get_walker(self, heads, parent_map):
        new_parent_map = dict(
            [(k * 40, [(p * 40) for p in ps]) for (k, ps) in parent_map.items()]
        )
        return ObjectStoreGraphWalker(
            [x * 40 for x in heads], new_parent_map.__getitem__
        )

    def test_ack_invalid_value(self):
        gw = self.get_walker([], {})
        self.assertRaises(ValueError, gw.ack, "tooshort")

    def test_empty(self):
        gw = self.get_walker([], {})
        self.assertIs(None, next(gw))
        gw.ack(b"a" * 40)
        self.assertIs(None, next(gw))

    def test_descends(self):
        gw = self.get_walker([b"a"], {b"a": [b"b"], b"b": []})
        self.assertEqual(b"a" * 40, next(gw))
        self.assertEqual(b"b" * 40, next(gw))

    def test_present(self):
        gw = self.get_walker([b"a"], {b"a": [b"b"], b"b": []})
        gw.ack(b"a" * 40)
        self.assertIs(None, next(gw))

    def test_parent_present(self):
        gw = self.get_walker([b"a"], {b"a": [b"b"], b"b": []})
        self.assertEqual(b"a" * 40, next(gw))
        gw.ack(b"a" * 40)
        self.assertIs(None, next(gw))

    def test_child_ack_later(self):
        gw = self.get_walker([b"a"], {b"a": [b"b"], b"b": [b"c"], b"c": []})
        self.assertEqual(b"a" * 40, next(gw))
        self.assertEqual(b"b" * 40, next(gw))
        gw.ack(b"a" * 40)
        self.assertIs(None, next(gw))

    def test_only_once(self):
        # a  b
        # |  |
        # c  d
        #  \ /
        #   e
        gw = self.get_walker(
            [b"a", b"b"],
            {
                b"a": [b"c"],
                b"b": [b"d"],
                b"c": [b"e"],
                b"d": [b"e"],
                b"e": [],
            },
        )
        walk = []
        acked = False
        walk.append(next(gw))
        walk.append(next(gw))
        # A branch (a, c) or (b, d) may be done after 2 steps or 3 depending on
        # the order walked: 3-step walks include (a, b, c) and (b, a, d), etc.
        if walk == [b"a" * 40, b"c" * 40] or walk == [b"b" * 40, b"d" * 40]:
            gw.ack(walk[0])
            acked = True

        walk.append(next(gw))
        if not acked and walk[2] == b"c" * 40:
            gw.ack(b"a" * 40)
        elif not acked and walk[2] == b"d" * 40:
            gw.ack(b"b" * 40)

        walk.append(next(gw))
        self.assertIs(None, next(gw))

        self.assertEqual([b"a" * 40, b"b" * 40, b"c" * 40, b"d" * 40], sorted(walk))
        self.assertLess(walk.index(b"a" * 40), walk.index(b"c" * 40))
        self.assertLess(walk.index(b"b" * 40), walk.index(b"d" * 40))
self.store[e_tree[b"f"][1]] self.assertEqual( f_tree.items(), [TreeEntry(path=b"d", mode=stat.S_IFREG | 0o100644, sha=blob_d.id)], ) def test_delete_blob(self): new_tree = commit_tree_changes( self.store, self.store[self.tree_id], [(b"ad/bd/c", None, None)] ) self.assertEqual(set(new_tree), {b"a", b"ad", b"c"}) ad_tree = self.store[new_tree[b"ad"][1]] self.assertEqual(set(ad_tree), {b"b", b"c"}) class TestReadPacksFile(TestCase): def test_read_packs(self): self.assertEqual( ["pack-1.pack"], list( read_packs_file( BytesIO( b"""P pack-1.pack """ ) ) ), ) diff --git a/dulwich/tests/test_objects.py b/dulwich/tests/test_objects.py index c4cf3513..d2c37455 100644 --- a/dulwich/tests/test_objects.py +++ b/dulwich/tests/test_objects.py @@ -1,1422 +1,1432 @@ # test_objects.py -- tests for objects.py # Copyright (C) 2007 James Westby # # Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU # General Public License as public by the Free Software Foundation; version 2.0 # or (at your option) any later version. You can redistribute it and/or # modify it under the terms of either of these two licenses. # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # You should have received a copy of the licenses; if not, see # for a copy of the GNU General Public License # and for a copy of the Apache # License, Version 2.0. # """Tests for git base objects.""" # TODO: Round-trip parse-serialize-parse and serialize-parse-serialize tests. from io import BytesIO import datetime from itertools import ( permutations, ) import os import stat import warnings from contextlib import contextmanager from dulwich.errors import ( ObjectFormatException, ) from dulwich.objects import ( Blob, Tree, Commit, ShaFile, Tag, TreeEntry, format_timezone, hex_to_sha, sha_to_hex, hex_to_filename, check_hexsha, check_identity, object_class, parse_timezone, pretty_format_tree_entry, parse_tree, _parse_tree_py, sorted_tree_items, _sorted_tree_items_py, MAX_TIME, ) from dulwich.tests import ( TestCase, ) from dulwich.tests.utils import ( make_commit, make_object, functest_builder, ext_functest_builder, ) a_sha = b"6f670c0fb53f9463760b7295fbb814e965fb20c8" b_sha = b"2969be3e8ee1c0222396a5611407e4769f14e54b" c_sha = b"954a536f7819d40e6f637f849ee187dd10066349" tree_sha = b"70c190eb48fa8bbb50ddc692a17b44cb781af7f6" tag_sha = b"71033db03a03c6a36721efcf1968dd8f8e0cf023" class TestHexToSha(TestCase): def test_simple(self): self.assertEqual(b"\xab\xcd" * 10, hex_to_sha(b"abcd" * 10)) def test_reverse(self): self.assertEqual(b"abcd" * 10, sha_to_hex(b"\xab\xcd" * 10)) class BlobReadTests(TestCase): """Test decompression of blobs""" def get_sha_file(self, cls, base, sha): dir = os.path.join(os.path.dirname(__file__), "data", base) return cls.from_path(hex_to_filename(dir, sha)) def get_blob(self, sha): """Return the blob named sha from the test data dir""" return self.get_sha_file(Blob, "blobs", sha) def get_tree(self, sha): return self.get_sha_file(Tree, "trees", sha) def get_tag(self, sha): return self.get_sha_file(Tag, "tags", sha) def commit(self, sha): return self.get_sha_file(Commit, "commits", sha) def test_decompress_simple_blob(self): b = self.get_blob(a_sha) self.assertEqual(b.data, b"test 1\n") self.assertEqual(b.sha().hexdigest().encode("ascii"), a_sha) 
diff --git a/dulwich/tests/test_objects.py b/dulwich/tests/test_objects.py
index c4cf3513..d2c37455 100644
--- a/dulwich/tests/test_objects.py
+++ b/dulwich/tests/test_objects.py
@@ -1,1422 +1,1432 @@
# test_objects.py -- tests for objects.py
# Copyright (C) 2007 James Westby <jw+debian@jameswestby.net>
#
# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
# General Public License as published by the Free Software Foundation; version 2.0
# or (at your option) any later version. You can redistribute it and/or
# modify it under the terms of either of these two licenses.
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# You should have received a copy of the licenses; if not, see
# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
# License, Version 2.0.
#

"""Tests for git base objects."""

# TODO: Round-trip parse-serialize-parse and serialize-parse-serialize tests.

from io import BytesIO
import datetime
from itertools import (
    permutations,
)
import os
import stat
import warnings
from contextlib import contextmanager

from dulwich.errors import (
    ObjectFormatException,
)
from dulwich.objects import (
    Blob,
    Tree,
    Commit,
    ShaFile,
    Tag,
    TreeEntry,
    format_timezone,
    hex_to_sha,
    sha_to_hex,
    hex_to_filename,
    check_hexsha,
    check_identity,
    object_class,
    parse_timezone,
    pretty_format_tree_entry,
    parse_tree,
    _parse_tree_py,
    sorted_tree_items,
    _sorted_tree_items_py,
    MAX_TIME,
)
from dulwich.tests import (
    TestCase,
)
from dulwich.tests.utils import (
    make_commit,
    make_object,
    functest_builder,
    ext_functest_builder,
)

a_sha = b"6f670c0fb53f9463760b7295fbb814e965fb20c8"
b_sha = b"2969be3e8ee1c0222396a5611407e4769f14e54b"
c_sha = b"954a536f7819d40e6f637f849ee187dd10066349"
tree_sha = b"70c190eb48fa8bbb50ddc692a17b44cb781af7f6"
tag_sha = b"71033db03a03c6a36721efcf1968dd8f8e0cf023"


class TestHexToSha(TestCase):
    def test_simple(self):
        self.assertEqual(b"\xab\xcd" * 10, hex_to_sha(b"abcd" * 10))

    def test_reverse(self):
        self.assertEqual(b"abcd" * 10, sha_to_hex(b"\xab\xcd" * 10))


class BlobReadTests(TestCase):
    """Test decompression of blobs"""

    def get_sha_file(self, cls, base, sha):
        dir = os.path.join(os.path.dirname(__file__), "data", base)
        return cls.from_path(hex_to_filename(dir, sha))

    def get_blob(self, sha):
        """Return the blob named sha from the test data dir"""
        return self.get_sha_file(Blob, "blobs", sha)

    def get_tree(self, sha):
        return self.get_sha_file(Tree, "trees", sha)

    def get_tag(self, sha):
        return self.get_sha_file(Tag, "tags", sha)

    def commit(self, sha):
        return self.get_sha_file(Commit, "commits", sha)

    def test_decompress_simple_blob(self):
        b = self.get_blob(a_sha)
        self.assertEqual(b.data, b"test 1\n")
        self.assertEqual(b.sha().hexdigest().encode("ascii"), a_sha)

    def test_hash(self):
        b = self.get_blob(a_sha)
        self.assertEqual(hash(b.id), hash(b))

    def test_parse_empty_blob_object(self):
        sha = b"e69de29bb2d1d6434b8b29ae775ad8c2e48c5391"
        b = self.get_blob(sha)
        self.assertEqual(b.data, b"")
        self.assertEqual(b.id, sha)
        self.assertEqual(b.sha().hexdigest().encode("ascii"), sha)

    def test_create_blob_from_string(self):
        string = b"test 2\n"
        b = Blob.from_string(string)
        self.assertEqual(b.data, string)
        self.assertEqual(b.sha().hexdigest().encode("ascii"), b_sha)

    def test_legacy_from_file(self):
        b1 = Blob.from_string(b"foo")
        b_raw = b1.as_legacy_object()
        b2 = b1.from_file(BytesIO(b_raw))
        self.assertEqual(b1, b2)

    def test_legacy_from_file_compression_level(self):
        b1 = Blob.from_string(b"foo")
        b_raw = b1.as_legacy_object(compression_level=6)
        b2 = b1.from_file(BytesIO(b_raw))
        self.assertEqual(b1, b2)

    def test_chunks(self):
        string = b"test 5\n"
        b = Blob.from_string(string)
        self.assertEqual([string], b.chunked)

    def test_splitlines(self):
        for case in [
            [],
            [b"foo\nbar\n"],
            [b"bl\na", b"blie"],
            [b"bl\na", b"blie", b"bloe\n"],
            [b"", b"bl\na", b"blie", b"bloe\n"],
            [b"", b"", b"", b"bla\n"],
            [b"", b"", b"", b"bla\n", b""],
            [b"bl", b"", b"a\naaa"],
            [b"a\naaa", b"a"],
        ]:
            b = Blob()
            b.chunked = case
            self.assertEqual(b.data.splitlines(True), b.splitlines())

    def test_set_chunks(self):
        b = Blob()
        b.chunked = [b"te", b"st", b" 5\n"]
        self.assertEqual(b"test 5\n", b.data)
        b.chunked = [b"te", b"st", b" 6\n"]
        self.assertEqual(b"test 6\n", b.as_raw_string())
        self.assertEqual(b"test 6\n", bytes(b))

    def test_parse_legacy_blob(self):
        string = b"test 3\n"
        b = self.get_blob(c_sha)
        self.assertEqual(b.data, string)
        self.assertEqual(b.sha().hexdigest().encode("ascii"), c_sha)

    def test_eq(self):
        blob1 = self.get_blob(a_sha)
        blob2 = self.get_blob(a_sha)
        self.assertEqual(blob1, blob2)

    def test_read_tree_from_file(self):
        t = self.get_tree(tree_sha)
        self.assertEqual(t.items()[0], (b"a", 33188, a_sha))
        self.assertEqual(t.items()[1], (b"b", 33188, b_sha))

    def test_read_tree_from_file_parse_count(self):
        old_deserialize = Tree._deserialize

        def reset_deserialize():
            Tree._deserialize = old_deserialize

        self.addCleanup(reset_deserialize)
        self.deserialize_count = 0

        def counting_deserialize(*args, **kwargs):
            self.deserialize_count += 1
            return old_deserialize(*args, **kwargs)

        Tree._deserialize = counting_deserialize
        t = self.get_tree(tree_sha)
        self.assertEqual(t.items()[0], (b"a", 33188, a_sha))
        self.assertEqual(t.items()[1], (b"b", 33188, b_sha))
        self.assertEqual(self.deserialize_count, 1)

    def test_read_tag_from_file(self):
        t = self.get_tag(tag_sha)
        self.assertEqual(
            t.object, (Commit, b"51b668fd5bf7061b7d6fa525f88803e6cfadaa51")
        )
        self.assertEqual(t.name, b"signed")
        self.assertEqual(t.tagger, b"Ali Sabil <ali.sabil@gmail.com>")
        self.assertEqual(t.tag_time, 1231203091)
        self.assertEqual(t.message, b"This is a signed tag\n")
        self.assertEqual(
            t.signature,
            b"-----BEGIN PGP SIGNATURE-----\n"
            b"Version: GnuPG v1.4.9 (GNU/Linux)\n"
            b"\n"
            b"iEYEABECAAYFAkliqx8ACgkQqSMmLy9u/"
            b"kcx5ACfakZ9NnPl02tOyYP6pkBoEkU1\n"
            b"5EcAn0UFgokaSvS371Ym/4W9iJj6vh3h\n"
            b"=ql7y\n"
            b"-----END PGP SIGNATURE-----\n",
        )

    def test_read_commit_from_file(self):
        sha = b"60dacdc733de308bb77bb76ce0fb0f9b44c9769e"
        c = self.commit(sha)
        self.assertEqual(c.tree, tree_sha)
        self.assertEqual(c.parents, [b"0d89f20333fbb1d2f3a94da77f4981373d8f4310"])
        self.assertEqual(c.author, b"James Westby <jw+debian@jameswestby.net>")
        self.assertEqual(c.committer, b"James Westby <jw+debian@jameswestby.net>")
        self.assertEqual(c.commit_time, 1174759230)
        self.assertEqual(c.commit_timezone, 0)
        self.assertEqual(c.author_timezone, 0)
        self.assertEqual(c.message, b"Test commit\n")

    def test_read_commit_no_parents(self):
        sha = b"0d89f20333fbb1d2f3a94da77f4981373d8f4310"
        c = self.commit(sha)
        self.assertEqual(c.tree, b"90182552c4a85a45ec2a835cadc3451bebdfe870")
        self.assertEqual(c.parents, [])
        self.assertEqual(c.author, b"James Westby <jw+debian@jameswestby.net>")
        self.assertEqual(c.committer, b"James Westby <jw+debian@jameswestby.net>")
        self.assertEqual(c.commit_time, 1174758034)
        self.assertEqual(c.commit_timezone, 0)
        self.assertEqual(c.author_timezone, 0)
        self.assertEqual(c.message, b"Test commit\n")

    def test_read_commit_two_parents(self):
        sha = b"5dac377bdded4c9aeb8dff595f0faeebcc8498cc"
        c = self.commit(sha)
        self.assertEqual(c.tree, b"d80c186a03f423a81b39df39dc87fd269736ca86")
        self.assertEqual(
            c.parents,
            [
                b"ab64bbdcc51b170d21588e5c5d391ee5c0c96dfd",
                b"4cffe90e0a41ad3f5190079d7c8f036bde29cbe6",
            ],
        )
        self.assertEqual(c.author, b"James Westby <jw+debian@jameswestby.net>")
        self.assertEqual(c.committer, b"James Westby <jw+debian@jameswestby.net>")
        self.assertEqual(c.commit_time, 1174773719)
        self.assertEqual(c.commit_timezone, 0)
        self.assertEqual(c.author_timezone, 0)
        self.assertEqual(c.message, b"Merge ../b\n")

    def test_stub_sha(self):
        sha = b"5" * 40
        c = make_commit(id=sha, message=b"foo")
        self.assertTrue(isinstance(c, Commit))
        self.assertEqual(sha, c.id)
        self.assertNotEqual(sha, c.sha())
class ShaFileCheckTests(TestCase):
    def assertCheckFails(self, cls, data):
        obj = cls()

        def do_check():
            obj.set_raw_string(data)
            obj.check()

        self.assertRaises(ObjectFormatException, do_check)

    def assertCheckSucceeds(self, cls, data):
        obj = cls()
        obj.set_raw_string(data)
        self.assertEqual(None, obj.check())


small_buffer_zlib_object = (
    b"\x48\x89\x15\xcc\x31\x0e\xc2\x30\x0c\x40\x51\xe6"
    b"\x9c\xc2\x3b\xaa\x64\x37\xc4\xc1\x12\x42\x5c\xc5"
    b"\x49\xac\x52\xd4\x92\xaa\x78\xe1\xf6\x94\xed\xeb"
    b"\x0d\xdf\x75\x02\xa2\x7c\xea\xe5\x65\xd5\x81\x8b"
    b"\x9a\x61\xba\xa0\xa9\x08\x36\xc9\x4c\x1a\xad\x88"
    b"\x16\xba\x46\xc4\xa8\x99\x6a\x64\xe1\xe0\xdf\xcd"
    b"\xa0\xf6\x75\x9d\x3d\xf8\xf1\xd0\x77\xdb\xfb\xdc"
    b"\x86\xa3\x87\xf1\x2f\x93\xed\x00\xb7\xc7\xd2\xab"
    b"\x2e\xcf\xfe\xf1\x3b\x50\xa4\x91\x53\x12\x24\x38"
    b"\x23\x21\x86\xf0\x03\x2f\x91\x24\x52"
)


class ShaFileTests(TestCase):
    def test_deflated_smaller_window_buffer(self):
        # zlib on some systems uses smaller buffers,
        # resulting in a different header.
        # See https://github.com/libgit2/libgit2/pull/464
        sf = ShaFile.from_file(BytesIO(small_buffer_zlib_object))
        self.assertEqual(sf.type_name, b"tag")
        self.assertEqual(sf.tagger, b" <@localhost>")


class CommitSerializationTests(TestCase):
    def make_commit(self, **kwargs):
        attrs = {
            "tree": b"d80c186a03f423a81b39df39dc87fd269736ca86",
            "parents": [
                b"ab64bbdcc51b170d21588e5c5d391ee5c0c96dfd",
                b"4cffe90e0a41ad3f5190079d7c8f036bde29cbe6",
            ],
            "author": b"James Westby <jw+debian@jameswestby.net>",
            "committer": b"James Westby <jw+debian@jameswestby.net>",
            "commit_time": 1174773719,
            "author_time": 1174773719,
            "commit_timezone": 0,
            "author_timezone": 0,
            "message": b"Merge ../b\n",
        }
        attrs.update(kwargs)
        return make_commit(**attrs)

    def test_encoding(self):
        c = self.make_commit(encoding=b"iso8859-1")
        self.assertTrue(b"encoding iso8859-1\n" in c.as_raw_string())

    def test_short_timestamp(self):
        c = self.make_commit(commit_time=30)
        c1 = Commit()
        c1.set_raw_string(c.as_raw_string())
        self.assertEqual(30, c1.commit_time)

    def test_full_tree(self):
        c = self.make_commit(commit_time=30)
        t = Tree()
        t.add(b"data-x", 0o644, Blob().id)
        c.tree = t
        c1 = Commit()
        c1.set_raw_string(c.as_raw_string())
        self.assertEqual(t.id, c1.tree)
        self.assertEqual(c.as_raw_string(), c1.as_raw_string())

    def test_raw_length(self):
        c = self.make_commit()
        self.assertEqual(len(c.as_raw_string()), c.raw_length())

    def test_simple(self):
        c = self.make_commit()
        self.assertEqual(c.id, b"5dac377bdded4c9aeb8dff595f0faeebcc8498cc")
        self.assertEqual(
            b"tree d80c186a03f423a81b39df39dc87fd269736ca86\n"
            b"parent ab64bbdcc51b170d21588e5c5d391ee5c0c96dfd\n"
            b"parent 4cffe90e0a41ad3f5190079d7c8f036bde29cbe6\n"
            b"author James Westby <jw+debian@jameswestby.net> "
            b"1174773719 +0000\n"
            b"committer James Westby <jw+debian@jameswestby.net> "
            b"1174773719 +0000\n"
            b"\n"
            b"Merge ../b\n",
            c.as_raw_string(),
        )

    def test_timezone(self):
        c = self.make_commit(commit_timezone=(5 * 60))
        self.assertTrue(b" +0005\n" in c.as_raw_string())

    def test_neg_timezone(self):
        c = self.make_commit(commit_timezone=(-1 * 3600))
        self.assertTrue(b" -0100\n" in c.as_raw_string())

    def test_deserialize(self):
        c = self.make_commit()
        d = Commit()
        d._deserialize(c.as_raw_chunks())
        self.assertEqual(c, d)

    def test_serialize_gpgsig(self):
        commit = self.make_commit(
            gpgsig=b"""-----BEGIN PGP SIGNATURE-----
Version: GnuPG v1

iQIcBAABCgAGBQJULCdfAAoJEACAbyvXKaRXuKwP/RyP9PA49uAvu8tQVCC/uBa8
vi975+xvO14R8Pp8k2nps7lSxCdtCd+xVT1VRHs0wNhOZo2YCVoU1HATkPejqSeV
NScTHcxnk4/+bxyfk14xvJkNp7FlQ3npmBkA+lbV0Ubr33rvtIE5jiJPyz+SgWAg
xdBG2TojV0squj00GoH/euK6aX7GgZtwdtpTv44haCQdSuPGDcI4TORqR6YSqvy3
GPE+3ZqXPFFb+KILtimkxitdwB7CpwmNse2vE3rONSwTvi8nq3ZoQYNY73CQGkUy
qoFU0pDtw87U3niFin1ZccDgH0bB6624sLViqrjcbYJeg815Htsu4rmzVaZADEVC
XhIO4MThebusdk0AcNGjgpf3HRHk0DPMDDlIjm+Oao0cqovvF6VyYmcb0C+RmhJj
dodLXMNmbqErwTk3zEkW0yZvNIYXH7m9SokPCZa4eeIM7be62X6h1mbt0/IU6Th+
v18fS0iTMP/Viug5und+05C/v04kgDo0CPphAbXwWMnkE4B6Tl9sdyUYXtvQsL7x
0+WP1gL27ANqNZiI07Kz/BhbBAQI/+2TFT7oGr0AnFPQ5jHp+3GpUf6OKuT1wT3H
ND189UFuRuubxb42vZhpcXRbqJVWnbECTKVUPsGZqat3enQUB63uM4i6/RdONDZA
fDeF1m4qYs+cUXKNUZ03
=X6RT
-----END PGP SIGNATURE-----"""
        )
        self.maxDiff = None
        self.assertEqual(
            b"""\
tree d80c186a03f423a81b39df39dc87fd269736ca86
parent ab64bbdcc51b170d21588e5c5d391ee5c0c96dfd
parent 4cffe90e0a41ad3f5190079d7c8f036bde29cbe6
author James Westby <jw+debian@jameswestby.net> 1174773719 +0000
committer James Westby <jw+debian@jameswestby.net> 1174773719 +0000
gpgsig -----BEGIN PGP SIGNATURE-----
 Version: GnuPG v1
 
 iQIcBAABCgAGBQJULCdfAAoJEACAbyvXKaRXuKwP/RyP9PA49uAvu8tQVCC/uBa8
 vi975+xvO14R8Pp8k2nps7lSxCdtCd+xVT1VRHs0wNhOZo2YCVoU1HATkPejqSeV
 NScTHcxnk4/+bxyfk14xvJkNp7FlQ3npmBkA+lbV0Ubr33rvtIE5jiJPyz+SgWAg
 xdBG2TojV0squj00GoH/euK6aX7GgZtwdtpTv44haCQdSuPGDcI4TORqR6YSqvy3
 GPE+3ZqXPFFb+KILtimkxitdwB7CpwmNse2vE3rONSwTvi8nq3ZoQYNY73CQGkUy
 qoFU0pDtw87U3niFin1ZccDgH0bB6624sLViqrjcbYJeg815Htsu4rmzVaZADEVC
 XhIO4MThebusdk0AcNGjgpf3HRHk0DPMDDlIjm+Oao0cqovvF6VyYmcb0C+RmhJj
 dodLXMNmbqErwTk3zEkW0yZvNIYXH7m9SokPCZa4eeIM7be62X6h1mbt0/IU6Th+
 v18fS0iTMP/Viug5und+05C/v04kgDo0CPphAbXwWMnkE4B6Tl9sdyUYXtvQsL7x
 0+WP1gL27ANqNZiI07Kz/BhbBAQI/+2TFT7oGr0AnFPQ5jHp+3GpUf6OKuT1wT3H
 ND189UFuRuubxb42vZhpcXRbqJVWnbECTKVUPsGZqat3enQUB63uM4i6/RdONDZA
 fDeF1m4qYs+cUXKNUZ03
 =X6RT
 -----END PGP SIGNATURE-----

Merge ../b
""",
            commit.as_raw_string(),
        )  # noqa: W291,W293

    def test_serialize_mergetag(self):
        tag = make_object(
            Tag,
            object=(Commit, b"a38d6181ff27824c79fc7df825164a212eff6a3f"),
            object_type_name=b"commit",
            name=b"v2.6.22-rc7",
            tag_time=1183319674,
            tag_timezone=0,
            tagger=b"Linus Torvalds <torvalds@woody.linux-foundation.org>",
            message=default_message,
        )
        commit = self.make_commit(mergetag=[tag])

        self.assertEqual(
            b"""tree d80c186a03f423a81b39df39dc87fd269736ca86
parent ab64bbdcc51b170d21588e5c5d391ee5c0c96dfd
parent 4cffe90e0a41ad3f5190079d7c8f036bde29cbe6
author James Westby <jw+debian@jameswestby.net> 1174773719 +0000
committer James Westby <jw+debian@jameswestby.net> 1174773719 +0000
mergetag object a38d6181ff27824c79fc7df825164a212eff6a3f
 type commit
 tag v2.6.22-rc7
 tagger Linus Torvalds <torvalds@woody.linux-foundation.org> 1183319674 +0000
 
 Linux 2.6.22-rc7
 -----BEGIN PGP SIGNATURE-----
 Version: GnuPG v1.4.7 (GNU/Linux)
 
 iD8DBQBGiAaAF3YsRnbiHLsRAitMAKCiLboJkQECM/jpYsY3WPfvUgLXkACgg3ql
 OK2XeQOiEeXtT76rV4t2WR4=
 =ivrA
 -----END PGP SIGNATURE-----

Merge ../b
""",
            commit.as_raw_string(),
        )  # noqa: W291,W293

    def test_serialize_mergetags(self):
        tag = make_object(
            Tag,
            object=(Commit, b"a38d6181ff27824c79fc7df825164a212eff6a3f"),
            object_type_name=b"commit",
            name=b"v2.6.22-rc7",
            tag_time=1183319674,
            tag_timezone=0,
            tagger=b"Linus Torvalds <torvalds@woody.linux-foundation.org>",
            message=default_message,
        )
        commit = self.make_commit(mergetag=[tag, tag])

        self.assertEqual(
            b"""tree d80c186a03f423a81b39df39dc87fd269736ca86
parent ab64bbdcc51b170d21588e5c5d391ee5c0c96dfd
parent 4cffe90e0a41ad3f5190079d7c8f036bde29cbe6
author James Westby <jw+debian@jameswestby.net> 1174773719 +0000
committer James Westby <jw+debian@jameswestby.net> 1174773719 +0000
mergetag object a38d6181ff27824c79fc7df825164a212eff6a3f
 type commit
 tag v2.6.22-rc7
 tagger Linus Torvalds <torvalds@woody.linux-foundation.org> 1183319674 +0000
 
 Linux 2.6.22-rc7
 -----BEGIN PGP SIGNATURE-----
 Version: GnuPG v1.4.7 (GNU/Linux)
 
 iD8DBQBGiAaAF3YsRnbiHLsRAitMAKCiLboJkQECM/jpYsY3WPfvUgLXkACgg3ql
 OK2XeQOiEeXtT76rV4t2WR4=
 =ivrA
 -----END PGP SIGNATURE-----
mergetag object a38d6181ff27824c79fc7df825164a212eff6a3f
 type commit
 tag v2.6.22-rc7
 tagger Linus Torvalds <torvalds@woody.linux-foundation.org> 1183319674 +0000
 
 Linux 2.6.22-rc7
 -----BEGIN PGP SIGNATURE-----
 Version: GnuPG v1.4.7 (GNU/Linux)
 
 iD8DBQBGiAaAF3YsRnbiHLsRAitMAKCiLboJkQECM/jpYsY3WPfvUgLXkACgg3ql
 OK2XeQOiEeXtT76rV4t2WR4=
 =ivrA
 -----END PGP SIGNATURE-----

Merge ../b
""",
            commit.as_raw_string(),
        )  # noqa: W291,W293

    def test_deserialize_mergetag(self):
        tag = make_object(
            Tag,
            object=(Commit, b"a38d6181ff27824c79fc7df825164a212eff6a3f"),
            object_type_name=b"commit",
            name=b"v2.6.22-rc7",
            tag_time=1183319674,
            tag_timezone=0,
            tagger=b"Linus Torvalds <torvalds@woody.linux-foundation.org>",
            message=default_message,
        )
        commit = self.make_commit(mergetag=[tag])

        d = Commit()
        d._deserialize(commit.as_raw_chunks())
        self.assertEqual(commit, d)

    def test_deserialize_mergetags(self):
        tag = make_object(
            Tag,
            object=(Commit, b"a38d6181ff27824c79fc7df825164a212eff6a3f"),
            object_type_name=b"commit",
            name=b"v2.6.22-rc7",
            tag_time=1183319674,
            tag_timezone=0,
            tagger=b"Linus Torvalds <torvalds@woody.linux-foundation.org>",
            message=default_message,
        )
        commit = self.make_commit(mergetag=[tag, tag])

        d = Commit()
        d._deserialize(commit.as_raw_chunks())
        self.assertEqual(commit, d)
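# --- Aside (not part of the upstream patch): the serialize/deserialize
# --- round-trip the class above pins down, using the same make_commit helper.
from dulwich.objects import Commit
from dulwich.tests.utils import make_commit

_c = make_commit(message=b"Merge ../b\n")
_d = Commit()
_d._deserialize(_c.as_raw_chunks())  # parse the serialized chunks back
assert _c == _d and _c.id == _d.id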
default_committer = b"James Westby <jw+debian@jameswestby.net> 1174773719 +0000"


class CommitParseTests(ShaFileCheckTests):
    def make_commit_lines(
        self,
        tree=b"d80c186a03f423a81b39df39dc87fd269736ca86",
        parents=[
            b"ab64bbdcc51b170d21588e5c5d391ee5c0c96dfd",
            b"4cffe90e0a41ad3f5190079d7c8f036bde29cbe6",
        ],
        author=default_committer,
        committer=default_committer,
        encoding=None,
        message=b"Merge ../b\n",
        extra=None,
    ):
        lines = []
        if tree is not None:
            lines.append(b"tree " + tree)
        if parents is not None:
            lines.extend(b"parent " + p for p in parents)
        if author is not None:
            lines.append(b"author " + author)
        if committer is not None:
            lines.append(b"committer " + committer)
        if encoding is not None:
            lines.append(b"encoding " + encoding)
        if extra is not None:
            for name, value in sorted(extra.items()):
                lines.append(name + b" " + value)
        lines.append(b"")
        if message is not None:
            lines.append(message)
        return lines

    def make_commit_text(self, **kwargs):
        return b"\n".join(self.make_commit_lines(**kwargs))

    def test_simple(self):
        c = Commit.from_string(self.make_commit_text())
        self.assertEqual(b"Merge ../b\n", c.message)
        self.assertEqual(b"James Westby <jw+debian@jameswestby.net>", c.author)
        self.assertEqual(b"James Westby <jw+debian@jameswestby.net>", c.committer)
        self.assertEqual(b"d80c186a03f423a81b39df39dc87fd269736ca86", c.tree)
        self.assertEqual(
            [
                b"ab64bbdcc51b170d21588e5c5d391ee5c0c96dfd",
                b"4cffe90e0a41ad3f5190079d7c8f036bde29cbe6",
            ],
            c.parents,
        )
        expected_time = datetime.datetime(2007, 3, 24, 22, 1, 59)
        self.assertEqual(
            expected_time, datetime.datetime.utcfromtimestamp(c.commit_time)
        )
        self.assertEqual(0, c.commit_timezone)
        self.assertEqual(
            expected_time, datetime.datetime.utcfromtimestamp(c.author_time)
        )
        self.assertEqual(0, c.author_timezone)
        self.assertEqual(None, c.encoding)

    def test_custom(self):
        c = Commit.from_string(self.make_commit_text(extra={b"extra-field": b"data"}))
        self.assertEqual([(b"extra-field", b"data")], c.extra)

    def test_encoding(self):
        c = Commit.from_string(self.make_commit_text(encoding=b"UTF-8"))
        self.assertEqual(b"UTF-8", c.encoding)

    def test_check(self):
        self.assertCheckSucceeds(Commit, self.make_commit_text())
        self.assertCheckSucceeds(Commit, self.make_commit_text(parents=None))
        self.assertCheckSucceeds(Commit, self.make_commit_text(encoding=b"UTF-8"))

        self.assertCheckFails(Commit, self.make_commit_text(tree=b"xxx"))
        self.assertCheckFails(Commit, self.make_commit_text(parents=[a_sha, b"xxx"]))
        bad_committer = b"some guy without an email address 1174773719 +0000"
        self.assertCheckFails(Commit, self.make_commit_text(committer=bad_committer))
        self.assertCheckFails(Commit, self.make_commit_text(author=bad_committer))
        self.assertCheckFails(Commit, self.make_commit_text(author=None))
        self.assertCheckFails(Commit, self.make_commit_text(committer=None))
        self.assertCheckFails(
            Commit, self.make_commit_text(author=None, committer=None)
        )

    def test_check_duplicates(self):
        # duplicate each of the header fields
        for i in range(5):
            lines = self.make_commit_lines(parents=[a_sha], encoding=b"UTF-8")
            lines.insert(i, lines[i])
            text = b"\n".join(lines)
            if lines[i].startswith(b"parent"):
                # duplicate parents are ok for now
                self.assertCheckSucceeds(Commit, text)
            else:
                self.assertCheckFails(Commit, text)

    def test_check_order(self):
        lines = self.make_commit_lines(parents=[a_sha], encoding=b"UTF-8")
        headers = lines[:5]
        rest = lines[5:]
        # of all possible permutations, ensure only the original succeeds
        for perm in permutations(headers):
            perm = list(perm)
            text = b"\n".join(perm + rest)
            if perm == headers:
                self.assertCheckSucceeds(Commit, text)
            else:
                self.assertCheckFails(Commit, text)

    def test_check_commit_with_unparseable_time(self):
        identity_with_wrong_time = (
            b"Igor Sysoev <igor@sysoev.ru> 18446743887488505614+42707004"
        )

        # Those fail at reading time
        self.assertCheckFails(
            Commit,
            self.make_commit_text(
                author=default_committer, committer=identity_with_wrong_time
            ),
        )
        self.assertCheckFails(
            Commit,
            self.make_commit_text(
                author=identity_with_wrong_time, committer=default_committer
            ),
        )

    def test_check_commit_with_overflow_date(self):
        """Date with overflow should raise an ObjectFormatException when checked"""
        identity_with_wrong_time = (
            b"Igor Sysoev <igor@sysoev.ru> 18446743887488505614 +42707004"
        )
        commit0 = Commit.from_string(
            self.make_commit_text(
                author=identity_with_wrong_time, committer=default_committer
            )
        )
        commit1 = Commit.from_string(
            self.make_commit_text(
                author=default_committer, committer=identity_with_wrong_time
            )
        )

        # Those fails when triggering the check() method
        for commit in [commit0, commit1]:
            with self.assertRaises(ObjectFormatException):
                commit.check()

    def test_mangled_author_line(self):
        """Mangled author line should successfully parse"""
        author_line = (
            b'Karl MacMillan <"Karl MacMillan '
            b'<kmacmillan@mentalrootkit.com>"> 1197475547 -0500'
        )
        expected_identity = (
            b'Karl MacMillan <"Karl MacMillan '
            b'<kmacmillan@mentalrootkit.com>">'
        )
        commit = Commit.from_string(self.make_commit_text(author=author_line))

        # The commit parses properly
        self.assertEqual(commit.author, expected_identity)

        # But the check fails because the author identity is bogus
        with self.assertRaises(ObjectFormatException):
            commit.check()

    def test_parse_gpgsig(self):
        c = Commit.from_string(
            b"""tree aaff74984cccd156a469afa7d9ab10e4777beb24
author Jelmer Vernooij <jelmer@samba.org> 1412179807 +0200
committer Jelmer Vernooij <jelmer@samba.org> 1412179807 +0200
gpgsig -----BEGIN PGP SIGNATURE-----
 Version: GnuPG v1
 
 iQIcBAABCgAGBQJULCdfAAoJEACAbyvXKaRXuKwP/RyP9PA49uAvu8tQVCC/uBa8
 vi975+xvO14R8Pp8k2nps7lSxCdtCd+xVT1VRHs0wNhOZo2YCVoU1HATkPejqSeV
 NScTHcxnk4/+bxyfk14xvJkNp7FlQ3npmBkA+lbV0Ubr33rvtIE5jiJPyz+SgWAg
 xdBG2TojV0squj00GoH/euK6aX7GgZtwdtpTv44haCQdSuPGDcI4TORqR6YSqvy3
 GPE+3ZqXPFFb+KILtimkxitdwB7CpwmNse2vE3rONSwTvi8nq3ZoQYNY73CQGkUy
 qoFU0pDtw87U3niFin1ZccDgH0bB6624sLViqrjcbYJeg815Htsu4rmzVaZADEVC
 XhIO4MThebusdk0AcNGjgpf3HRHk0DPMDDlIjm+Oao0cqovvF6VyYmcb0C+RmhJj
 dodLXMNmbqErwTk3zEkW0yZvNIYXH7m9SokPCZa4eeIM7be62X6h1mbt0/IU6Th+
 v18fS0iTMP/Viug5und+05C/v04kgDo0CPphAbXwWMnkE4B6Tl9sdyUYXtvQsL7x
 0+WP1gL27ANqNZiI07Kz/BhbBAQI/+2TFT7oGr0AnFPQ5jHp+3GpUf6OKuT1wT3H
 ND189UFuRuubxb42vZhpcXRbqJVWnbECTKVUPsGZqat3enQUB63uM4i6/RdONDZA
 fDeF1m4qYs+cUXKNUZ03
 =X6RT
 -----END PGP SIGNATURE-----

foo
"""
        )  # noqa: W291,W293
        self.assertEqual(b"foo\n", c.message)
        self.assertEqual([], c.extra)
        self.assertEqual(
            b"""-----BEGIN PGP SIGNATURE-----
Version: GnuPG v1

iQIcBAABCgAGBQJULCdfAAoJEACAbyvXKaRXuKwP/RyP9PA49uAvu8tQVCC/uBa8
vi975+xvO14R8Pp8k2nps7lSxCdtCd+xVT1VRHs0wNhOZo2YCVoU1HATkPejqSeV
NScTHcxnk4/+bxyfk14xvJkNp7FlQ3npmBkA+lbV0Ubr33rvtIE5jiJPyz+SgWAg
xdBG2TojV0squj00GoH/euK6aX7GgZtwdtpTv44haCQdSuPGDcI4TORqR6YSqvy3
GPE+3ZqXPFFb+KILtimkxitdwB7CpwmNse2vE3rONSwTvi8nq3ZoQYNY73CQGkUy
qoFU0pDtw87U3niFin1ZccDgH0bB6624sLViqrjcbYJeg815Htsu4rmzVaZADEVC
XhIO4MThebusdk0AcNGjgpf3HRHk0DPMDDlIjm+Oao0cqovvF6VyYmcb0C+RmhJj
dodLXMNmbqErwTk3zEkW0yZvNIYXH7m9SokPCZa4eeIM7be62X6h1mbt0/IU6Th+
v18fS0iTMP/Viug5und+05C/v04kgDo0CPphAbXwWMnkE4B6Tl9sdyUYXtvQsL7x
0+WP1gL27ANqNZiI07Kz/BhbBAQI/+2TFT7oGr0AnFPQ5jHp+3GpUf6OKuT1wT3H
ND189UFuRuubxb42vZhpcXRbqJVWnbECTKVUPsGZqat3enQUB63uM4i6/RdONDZA
fDeF1m4qYs+cUXKNUZ03
=X6RT
-----END PGP SIGNATURE-----""",
            c.gpgsig,
        )
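# --- Aside (not part of the upstream patch): the check() contract exercised
# --- throughout this class: parsing is deliberately permissive, and check()
# --- raises ObjectFormatException for data that parses but is malformed.
from dulwich.errors import ObjectFormatException
from dulwich.objects import Commit


def _check_fails(data):
    c = Commit()
    try:
        c.set_raw_string(data)  # parsing itself may reject the data...
        c.check()               # ...otherwise validation must
    except ObjectFormatException:
        return True
    return False


assert _check_fails(b"tree xxx\n\nmsg\n")  # bogus tree sha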
    def test_parse_header_trailing_newline(self):
        c = Commit.from_string(
            b"""\
tree a7d6277f78d3ecd0230a1a5df6db00b1d9c521ac
parent c09b6dec7a73760fbdb478383a3c926b18db8bbe
author Neil Matatall <oreoshake@github.com> 1461964057 -1000
committer Neil Matatall <oreoshake@github.com> 1461964057 -1000
gpgsig -----BEGIN PGP SIGNATURE-----
 
 wsBcBAABCAAQBQJXI80ZCRA6pcNDcVZ70gAAarcIABs72xRX3FWeox349nh6ucJK
 CtwmBTusez2Zwmq895fQEbZK7jpaGO5TRO4OvjFxlRo0E08UFx3pxZHSpj6bsFeL
 hHsDXnCaotphLkbgKKRdGZo7tDqM84wuEDlh4MwNe7qlFC7bYLDyysc81ZX5lpMm
 2MFF1TvjLAzSvkT7H1LPkuR3hSvfCYhikbPOUNnKOo0sYjeJeAJ/JdAVQ4mdJIM0
 gl3REp9+A+qBEpNQI7z94Pg5Bc5xenwuDh3SJgHvJV6zBWupWcdB3fAkVd4TPnEZ
 nHxksHfeNln9RKseIDcy4b2ATjhDNIJZARHNfr6oy4u3XPW4svRqtBsLoMiIeuI=
 =ms6q
 -----END PGP SIGNATURE-----
 

3.3.0 version bump and docs
"""
        )  # noqa: W291,W293
        self.assertEqual([], c.extra)
        self.assertEqual(
            b"""\
-----BEGIN PGP SIGNATURE-----

wsBcBAABCAAQBQJXI80ZCRA6pcNDcVZ70gAAarcIABs72xRX3FWeox349nh6ucJK
CtwmBTusez2Zwmq895fQEbZK7jpaGO5TRO4OvjFxlRo0E08UFx3pxZHSpj6bsFeL
hHsDXnCaotphLkbgKKRdGZo7tDqM84wuEDlh4MwNe7qlFC7bYLDyysc81ZX5lpMm
2MFF1TvjLAzSvkT7H1LPkuR3hSvfCYhikbPOUNnKOo0sYjeJeAJ/JdAVQ4mdJIM0
gl3REp9+A+qBEpNQI7z94Pg5Bc5xenwuDh3SJgHvJV6zBWupWcdB3fAkVd4TPnEZ
nHxksHfeNln9RKseIDcy4b2ATjhDNIJZARHNfr6oy4u3XPW4svRqtBsLoMiIeuI=
=ms6q
-----END PGP SIGNATURE-----\n""",
            c.gpgsig,
        )


_TREE_ITEMS = {
    b"a.c": (0o100755, b"d80c186a03f423a81b39df39dc87fd269736ca86"),
    b"a": (stat.S_IFDIR, b"d80c186a03f423a81b39df39dc87fd269736ca86"),
    b"a/c": (stat.S_IFDIR, b"d80c186a03f423a81b39df39dc87fd269736ca86"),
}

_SORTED_TREE_ITEMS = [
    TreeEntry(b"a.c", 0o100755, b"d80c186a03f423a81b39df39dc87fd269736ca86"),
    TreeEntry(b"a", stat.S_IFDIR, b"d80c186a03f423a81b39df39dc87fd269736ca86"),
    TreeEntry(b"a/c", stat.S_IFDIR, b"d80c186a03f423a81b39df39dc87fd269736ca86"),
]
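# --- Aside (not part of the upstream patch): why _SORTED_TREE_ITEMS runs
# --- b"a.c", b"a", b"a/c": git orders tree entries by name, but a directory is
# --- compared as if its name ended in "/", so the directory b"a" sorts as
# --- b"a/", which comes after b"a.c" because b"/" sorts after b".". A sketch
# --- of the default directory sort:
import stat

from dulwich.objects import Tree

_t = Tree()
_t[b"a.c"] = (0o100755, b"d80c186a03f423a81b39df39dc87fd269736ca86")
_t[b"a"] = (stat.S_IFDIR, b"d80c186a03f423a81b39df39dc87fd269736ca86")
assert [e.path for e in _t.items()] == [b"a.c", b"a"]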
class TreeTests(ShaFileCheckTests):
    def test_add(self):
        myhexsha = b"d80c186a03f423a81b39df39dc87fd269736ca86"
        x = Tree()
        x.add(b"myname", 0o100755, myhexsha)
        self.assertEqual(x[b"myname"], (0o100755, myhexsha))
        self.assertEqual(b"100755 myname\0" + hex_to_sha(myhexsha), x.as_raw_string())

    def test_add_old_order(self):
        myhexsha = b"d80c186a03f423a81b39df39dc87fd269736ca86"
        x = Tree()
        warnings.simplefilter("ignore", DeprecationWarning)
        try:
            x.add(0o100755, b"myname", myhexsha)
        finally:
            warnings.resetwarnings()
        self.assertEqual(x[b"myname"], (0o100755, myhexsha))
        self.assertEqual(b"100755 myname\0" + hex_to_sha(myhexsha), x.as_raw_string())

    def test_simple(self):
        myhexsha = b"d80c186a03f423a81b39df39dc87fd269736ca86"
        x = Tree()
        x[b"myname"] = (0o100755, myhexsha)
        self.assertEqual(b"100755 myname\0" + hex_to_sha(myhexsha), x.as_raw_string())
        self.assertEqual(b"100755 myname\0" + hex_to_sha(myhexsha), bytes(x))

    def test_tree_update_id(self):
        x = Tree()
        x[b"a.c"] = (0o100755, b"d80c186a03f423a81b39df39dc87fd269736ca86")
        self.assertEqual(b"0c5c6bc2c081accfbc250331b19e43b904ab9cdd", x.id)
        x[b"a.b"] = (stat.S_IFDIR, b"d80c186a03f423a81b39df39dc87fd269736ca86")
        self.assertEqual(b"07bfcb5f3ada15bbebdfa3bbb8fd858a363925c8", x.id)

    def test_tree_iteritems_dir_sort(self):
        x = Tree()
        for name, item in _TREE_ITEMS.items():
            x[name] = item
        self.assertEqual(_SORTED_TREE_ITEMS, x.items())

    def test_tree_items_dir_sort(self):
        x = Tree()
        for name, item in _TREE_ITEMS.items():
            x[name] = item
        self.assertEqual(_SORTED_TREE_ITEMS, x.items())

    def _do_test_parse_tree(self, parse_tree):
        dir = os.path.join(os.path.dirname(__file__), "data", "trees")
        o = Tree.from_path(hex_to_filename(dir, tree_sha))
        self.assertEqual(
            [(b"a", 0o100644, a_sha), (b"b", 0o100644, b_sha)],
            list(parse_tree(o.as_raw_string())),
        )
        # test a broken tree that has a leading 0 on the file mode
        broken_tree = b"0100644 foo\0" + hex_to_sha(a_sha)

        def eval_parse_tree(*args, **kwargs):
            return list(parse_tree(*args, **kwargs))

        self.assertEqual([(b"foo", 0o100644, a_sha)], eval_parse_tree(broken_tree))
        self.assertRaises(
            ObjectFormatException, eval_parse_tree, broken_tree, strict=True
        )

    test_parse_tree = functest_builder(_do_test_parse_tree, _parse_tree_py)
    test_parse_tree_extension = ext_functest_builder(_do_test_parse_tree, parse_tree)

    def _do_test_sorted_tree_items(self, sorted_tree_items):
        def do_sort(entries):
            return list(sorted_tree_items(entries, False))

        actual = do_sort(_TREE_ITEMS)
        self.assertEqual(_SORTED_TREE_ITEMS, actual)
        self.assertTrue(isinstance(actual[0], TreeEntry))

        # C/Python implementations may differ in specific error types, but
        # should all error on invalid inputs.
        # For example, the C implementation has stricter type checks, so may
        # raise TypeError where the Python implementation raises
        # AttributeError.
        errors = (TypeError, ValueError, AttributeError)
        self.assertRaises(errors, do_sort, b"foo")
        self.assertRaises(errors, do_sort, {b"foo": (1, 2, 3)})

        myhexsha = b"d80c186a03f423a81b39df39dc87fd269736ca86"
        self.assertRaises(errors, do_sort, {b"foo": (b"xxx", myhexsha)})
        self.assertRaises(errors, do_sort, {b"foo": (0o100755, 12345)})

    test_sorted_tree_items = functest_builder(
        _do_test_sorted_tree_items, _sorted_tree_items_py
    )
    test_sorted_tree_items_extension = ext_functest_builder(
        _do_test_sorted_tree_items, sorted_tree_items
    )

    def _do_test_sorted_tree_items_name_order(self, sorted_tree_items):
        self.assertEqual(
            [
                TreeEntry(
-                    b"a", stat.S_IFDIR, b"d80c186a03f423a81b39df39dc87fd269736ca86"
+                    b"a",
+                    stat.S_IFDIR,
+                    b"d80c186a03f423a81b39df39dc87fd269736ca86",
                ),
                TreeEntry(
-                    b"a.c", 0o100755, b"d80c186a03f423a81b39df39dc87fd269736ca86"
+                    b"a.c",
+                    0o100755,
+                    b"d80c186a03f423a81b39df39dc87fd269736ca86",
                ),
                TreeEntry(
-                    b"a/c", stat.S_IFDIR, b"d80c186a03f423a81b39df39dc87fd269736ca86"
+                    b"a/c",
+                    stat.S_IFDIR,
+                    b"d80c186a03f423a81b39df39dc87fd269736ca86",
                ),
            ],
            list(sorted_tree_items(_TREE_ITEMS, True)),
        )

    test_sorted_tree_items_name_order = functest_builder(
        _do_test_sorted_tree_items_name_order, _sorted_tree_items_py
    )
    test_sorted_tree_items_name_order_extension = ext_functest_builder(
        _do_test_sorted_tree_items_name_order, sorted_tree_items
    )
b"\n100755 a\0" + sha2) self.assertCheckFails(t, b"100644 b\0" + sha2 + b"\n100644 a\0" + sha) def test_iter(self): t = Tree() t[b"foo"] = (0o100644, a_sha) self.assertEqual(set([b"foo"]), set(t)) class TagSerializeTests(TestCase): def test_serialize_simple(self): x = make_object( Tag, tagger=b"Jelmer Vernooij ", name=b"0.1", message=b"Tag 0.1", object=(Blob, b"d80c186a03f423a81b39df39dc87fd269736ca86"), tag_time=423423423, tag_timezone=0, ) self.assertEqual( ( b"object d80c186a03f423a81b39df39dc87fd269736ca86\n" b"type blob\n" b"tag 0.1\n" b"tagger Jelmer Vernooij " b"423423423 +0000\n" b"\n" b"Tag 0.1" ), x.as_raw_string(), ) def test_serialize_none_message(self): x = make_object( Tag, tagger=b"Jelmer Vernooij ", name=b"0.1", message=None, object=(Blob, b"d80c186a03f423a81b39df39dc87fd269736ca86"), tag_time=423423423, tag_timezone=0, ) self.assertEqual( ( b"object d80c186a03f423a81b39df39dc87fd269736ca86\n" b"type blob\n" b"tag 0.1\n" b"tagger Jelmer Vernooij " b"423423423 +0000\n" ), x.as_raw_string(), ) default_tagger = ( b"Linus Torvalds " b"1183319674 -0700" ) default_message = b"""Linux 2.6.22-rc7 -----BEGIN PGP SIGNATURE----- Version: GnuPG v1.4.7 (GNU/Linux) iD8DBQBGiAaAF3YsRnbiHLsRAitMAKCiLboJkQECM/jpYsY3WPfvUgLXkACgg3ql OK2XeQOiEeXtT76rV4t2WR4= =ivrA -----END PGP SIGNATURE----- """ class TagParseTests(ShaFileCheckTests): def make_tag_lines( self, object_sha=b"a38d6181ff27824c79fc7df825164a212eff6a3f", object_type_name=b"commit", name=b"v2.6.22-rc7", tagger=default_tagger, message=default_message, ): lines = [] if object_sha is not None: lines.append(b"object " + object_sha) if object_type_name is not None: lines.append(b"type " + object_type_name) if name is not None: lines.append(b"tag " + name) if tagger is not None: lines.append(b"tagger " + tagger) if message is not None: lines.append(b"") lines.append(message) return lines def make_tag_text(self, **kwargs): return b"\n".join(self.make_tag_lines(**kwargs)) def test_parse(self): x = Tag() x.set_raw_string(self.make_tag_text()) self.assertEqual( b"Linus Torvalds ", x.tagger ) self.assertEqual(b"v2.6.22-rc7", x.name) object_type, object_sha = x.object self.assertEqual(b"a38d6181ff27824c79fc7df825164a212eff6a3f", object_sha) self.assertEqual(Commit, object_type) self.assertEqual( datetime.datetime.utcfromtimestamp(x.tag_time), datetime.datetime(2007, 7, 1, 19, 54, 34), ) self.assertEqual(-25200, x.tag_timezone) def test_parse_no_tagger(self): x = Tag() x.set_raw_string(self.make_tag_text(tagger=None)) self.assertEqual(None, x.tagger) self.assertEqual(b"v2.6.22-rc7", x.name) self.assertEqual(None, x.tag_time) def test_parse_no_message(self): x = Tag() x.set_raw_string(self.make_tag_text(message=None)) self.assertEqual(None, x.message) self.assertEqual( b"Linus Torvalds ", x.tagger ) self.assertEqual( datetime.datetime.utcfromtimestamp(x.tag_time), datetime.datetime(2007, 7, 1, 19, 54, 34), ) self.assertEqual(-25200, x.tag_timezone) self.assertEqual(b"v2.6.22-rc7", x.name) def test_check(self): self.assertCheckSucceeds(Tag, self.make_tag_text()) self.assertCheckFails(Tag, self.make_tag_text(object_sha=None)) self.assertCheckFails(Tag, self.make_tag_text(object_type_name=None)) self.assertCheckFails(Tag, self.make_tag_text(name=None)) self.assertCheckFails(Tag, self.make_tag_text(name=b"")) self.assertCheckFails(Tag, self.make_tag_text(object_type_name=b"foobar")) self.assertCheckFails( Tag, self.make_tag_text( tagger=b"some guy without an email address 1183319674 -0700" ), ) self.assertCheckFails( Tag, self.make_tag_text( tagger=( 
b"Linus Torvalds " b"Sun 7 Jul 2007 12:54:34 +0700" ) ), ) self.assertCheckFails(Tag, self.make_tag_text(object_sha=b"xxx")) def test_check_tag_with_unparseable_field(self): self.assertCheckFails( Tag, self.make_tag_text( tagger=( b"Linus Torvalds " b"423423+0000" ) ), ) def test_check_tag_with_overflow_time(self): """Date with overflow should raise an ObjectFormatException when checked""" author = "Some Dude %s +0000" % (MAX_TIME + 1,) tag = Tag.from_string(self.make_tag_text(tagger=(author.encode()))) with self.assertRaises(ObjectFormatException): tag.check() def test_check_duplicates(self): # duplicate each of the header fields for i in range(4): lines = self.make_tag_lines() lines.insert(i, lines[i]) self.assertCheckFails(Tag, b"\n".join(lines)) def test_check_order(self): lines = self.make_tag_lines() headers = lines[:4] rest = lines[4:] # of all possible permutations, ensure only the original succeeds for perm in permutations(headers): perm = list(perm) text = b"\n".join(perm + rest) if perm == headers: self.assertCheckSucceeds(Tag, text) else: self.assertCheckFails(Tag, text) def test_tree_copy_after_update(self): """Check Tree.id is correctly updated when the tree is copied after updated.""" shas = [] tree = Tree() shas.append(tree.id) tree.add(b"data", 0o644, Blob().id) copied = tree.copy() shas.append(tree.id) shas.append(copied.id) self.assertNotIn(shas[0], shas[1:]) self.assertEqual(shas[1], shas[2]) class CheckTests(TestCase): def test_check_hexsha(self): check_hexsha(a_sha, "failed to check good sha") self.assertRaises( ObjectFormatException, check_hexsha, b"1" * 39, "sha too short" ) self.assertRaises( ObjectFormatException, check_hexsha, b"1" * 41, "sha too long" ) self.assertRaises( - ObjectFormatException, check_hexsha, b"x" * 40, "invalid characters" + ObjectFormatException, + check_hexsha, + b"x" * 40, + "invalid characters", ) def test_check_identity(self): check_identity( - b"Dave Borowitz ", "failed to check good identity" + b"Dave Borowitz ", + "failed to check good identity", ) check_identity(b"", "failed to check good identity") self.assertRaises( ObjectFormatException, check_identity, b"Dave Borowitz", "no email" ) self.assertRaises( ObjectFormatException, check_identity, b"Dave Borowitz ", "incomplete email", ) self.assertRaises( ObjectFormatException, check_identity, b"Dave Borowitz <", "typo", ) self.assertRaises( ObjectFormatException, check_identity, b"Dave Borowitz >", "typo", ) self.assertRaises( ObjectFormatException, check_identity, b"Dave Borowitz xxx", "trailing characters", ) class TimezoneTests(TestCase): def test_parse_timezone_utc(self): self.assertEqual((0, False), parse_timezone(b"+0000")) def test_parse_timezone_utc_negative(self): self.assertEqual((0, True), parse_timezone(b"-0000")) def test_generate_timezone_utc(self): self.assertEqual(b"+0000", format_timezone(0)) def test_generate_timezone_utc_negative(self): self.assertEqual(b"-0000", format_timezone(0, True)) def test_parse_timezone_cet(self): self.assertEqual((60 * 60, False), parse_timezone(b"+0100")) def test_format_timezone_cet(self): self.assertEqual(b"+0100", format_timezone(60 * 60)) def test_format_timezone_pdt(self): self.assertEqual(b"-0400", format_timezone(-4 * 60 * 60)) def test_parse_timezone_pdt(self): self.assertEqual((-4 * 60 * 60, False), parse_timezone(b"-0400")) def test_format_timezone_pdt_half(self): self.assertEqual(b"-0440", format_timezone(int(((-4 * 60) - 40) * 60))) def test_format_timezone_double_negative(self): self.assertEqual(b"--700", 
format_timezone(int(((7 * 60)) * 60), True)) def test_parse_timezone_pdt_half(self): self.assertEqual((((-4 * 60) - 40) * 60, False), parse_timezone(b"-0440")) def test_parse_timezone_double_negative(self): self.assertEqual((int(((7 * 60)) * 60), False), parse_timezone(b"+700")) self.assertEqual((int(((7 * 60)) * 60), True), parse_timezone(b"--700")) class ShaFileCopyTests(TestCase): def assert_copy(self, orig): oclass = object_class(orig.type_num) copy = orig.copy() self.assertTrue(isinstance(copy, oclass)) self.assertEqual(copy, orig) self.assertTrue(copy is not orig) def test_commit_copy(self): attrs = { "tree": b"d80c186a03f423a81b39df39dc87fd269736ca86", "parents": [ b"ab64bbdcc51b170d21588e5c5d391ee5c0c96dfd", b"4cffe90e0a41ad3f5190079d7c8f036bde29cbe6", ], "author": b"James Westby ", "committer": b"James Westby ", "commit_time": 1174773719, "author_time": 1174773719, "commit_timezone": 0, "author_timezone": 0, "message": b"Merge ../b\n", } commit = make_commit(**attrs) self.assert_copy(commit) def test_blob_copy(self): blob = make_object(Blob, data=b"i am a blob") self.assert_copy(blob) def test_tree_copy(self): blob = make_object(Blob, data=b"i am a blob") tree = Tree() tree[b"blob"] = (stat.S_IFREG, blob.id) self.assert_copy(tree) def test_tag_copy(self): tag = make_object( Tag, name=b"tag", message=b"", tagger=b"Tagger ", tag_time=12345, tag_timezone=0, object=(Commit, b"0" * 40), ) self.assert_copy(tag) class ShaFileSerializeTests(TestCase): """`ShaFile` objects only gets serialized once if they haven't changed.""" @contextmanager def assert_serialization_on_change( self, obj, needs_serialization_after_change=True ): old_id = obj.id self.assertFalse(obj._needs_serialization) yield obj if needs_serialization_after_change: self.assertTrue(obj._needs_serialization) else: self.assertFalse(obj._needs_serialization) new_id = obj.id self.assertFalse(obj._needs_serialization) self.assertNotEqual(old_id, new_id) def test_commit_serialize(self): attrs = { "tree": b"d80c186a03f423a81b39df39dc87fd269736ca86", "parents": [ b"ab64bbdcc51b170d21588e5c5d391ee5c0c96dfd", b"4cffe90e0a41ad3f5190079d7c8f036bde29cbe6", ], "author": b"James Westby ", "committer": b"James Westby ", "commit_time": 1174773719, "author_time": 1174773719, "commit_timezone": 0, "author_timezone": 0, "message": b"Merge ../b\n", } commit = make_commit(**attrs) with self.assert_serialization_on_change(commit): commit.parents = [b"ab64bbdcc51b170d21588e5c5d391ee5c0c96dfd"] def test_blob_serialize(self): blob = make_object(Blob, data=b"i am a blob") with self.assert_serialization_on_change( blob, needs_serialization_after_change=False ): blob.data = b"i am another blob" def test_tree_serialize(self): blob = make_object(Blob, data=b"i am a blob") tree = Tree() tree[b"blob"] = (stat.S_IFREG, blob.id) with self.assert_serialization_on_change(tree): tree[b"blob2"] = (stat.S_IFREG, blob.id) def test_tag_serialize(self): tag = make_object( Tag, name=b"tag", message=b"", tagger=b"Tagger ", tag_time=12345, tag_timezone=0, object=(Commit, b"0" * 40), ) with self.assert_serialization_on_change(tag): tag.message = b"new message" def test_tag_serialize_time_error(self): with self.assertRaises(ObjectFormatException): tag = make_object( Tag, name=b"tag", message=b"some message", tagger=b"Tagger 1174773719+0000", object=(Commit, b"0" * 40), ) tag._deserialize(tag._serialize()) class PrettyFormatTreeEntryTests(TestCase): def test_format(self): self.assertEqual( "40000 tree 40820c38cfb182ce6c8b261555410d8382a5918b\tfoo\n", 
pretty_format_tree_entry( b"foo", 0o40000, b"40820c38cfb182ce6c8b261555410d8382a5918b" ), ) diff --git a/dulwich/tests/test_objectspec.py b/dulwich/tests/test_objectspec.py index b3cd4ba5..67c6049b 100644 --- a/dulwich/tests/test_objectspec.py +++ b/dulwich/tests/test_objectspec.py @@ -1,254 +1,260 @@ # test_objectspec.py -- tests for objectspec.py # Copyright (C) 2014 Jelmer Vernooij # # Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU # General Public License as public by the Free Software Foundation; version 2.0 # or (at your option) any later version. You can redistribute it and/or # modify it under the terms of either of these two licenses. # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # You should have received a copy of the licenses; if not, see # for a copy of the GNU General Public License # and for a copy of the Apache # License, Version 2.0. # """Tests for revision spec parsing.""" # TODO: Round-trip parse-serialize-parse and serialize-parse-serialize tests. from dulwich.objects import ( Blob, ) from dulwich.objectspec import ( parse_object, parse_commit, parse_commit_range, parse_ref, parse_refs, parse_reftuple, parse_reftuples, parse_tree, ) from dulwich.repo import MemoryRepo from dulwich.tests import ( TestCase, ) from dulwich.tests.utils import ( build_commit_graph, ) class ParseObjectTests(TestCase): """Test parse_object.""" def test_nonexistent(self): r = MemoryRepo() self.assertRaises(KeyError, parse_object, r, "thisdoesnotexist") def test_blob_by_sha(self): r = MemoryRepo() b = Blob.from_string(b"Blah") r.object_store.add_object(b) self.assertEqual(b, parse_object(r, b.id)) class ParseCommitRangeTests(TestCase): """Test parse_commit_range.""" def test_nonexistent(self): r = MemoryRepo() self.assertRaises(KeyError, parse_commit_range, r, "thisdoesnotexist") def test_commit_by_sha(self): r = MemoryRepo() c1, c2, c3 = build_commit_graph(r.object_store, [[1], [2, 1], [3, 1, 2]]) self.assertEqual([c1], list(parse_commit_range(r, c1.id))) class ParseCommitTests(TestCase): """Test parse_commit.""" def test_nonexistent(self): r = MemoryRepo() self.assertRaises(KeyError, parse_commit, r, "thisdoesnotexist") def test_commit_by_sha(self): r = MemoryRepo() [c1] = build_commit_graph(r.object_store, [[1]]) self.assertEqual(c1, parse_commit(r, c1.id)) def test_commit_by_short_sha(self): r = MemoryRepo() [c1] = build_commit_graph(r.object_store, [[1]]) self.assertEqual(c1, parse_commit(r, c1.id[:10])) class ParseRefTests(TestCase): def test_nonexistent(self): r = {} self.assertRaises(KeyError, parse_ref, r, b"thisdoesnotexist") def test_ambiguous_ref(self): r = { b"ambig1": "bla", b"refs/ambig1": "bla", b"refs/tags/ambig1": "bla", b"refs/heads/ambig1": "bla", b"refs/remotes/ambig1": "bla", b"refs/remotes/ambig1/HEAD": "bla", } self.assertEqual(b"ambig1", parse_ref(r, b"ambig1")) def test_ambiguous_ref2(self): r = { b"refs/ambig2": "bla", b"refs/tags/ambig2": "bla", b"refs/heads/ambig2": "bla", b"refs/remotes/ambig2": "bla", b"refs/remotes/ambig2/HEAD": "bla", } self.assertEqual(b"refs/ambig2", parse_ref(r, b"ambig2")) def test_ambiguous_tag(self): r = { b"refs/tags/ambig3": "bla", b"refs/heads/ambig3": "bla", b"refs/remotes/ambig3": "bla", b"refs/remotes/ambig3/HEAD": "bla", } 
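# [Editor's sketch, not part of the original diff.] The ambig1..ambig6
# fixtures in this class pin down git's ref disambiguation order: a short
# name is tried against a fixed list of patterns and the first match wins,
# which is why a literal b"ambig1" beats refs/tags/ambig1 above while
# b"ambig3" resolves to refs/tags/ambig3 here. A minimal version of the
# lookup that dulwich.objectspec.parse_ref performs (parse_ref_sketch is a
# hypothetical name):
#
#     def parse_ref_sketch(container, refname):
#         for pattern in (b"%s", b"refs/%s", b"refs/tags/%s",
#                         b"refs/heads/%s", b"refs/remotes/%s",
#                         b"refs/remotes/%s/HEAD"):
#             candidate = pattern % refname
#             if candidate in container:
#                 return candidate
#         raise KeyError(refname)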
self.assertEqual(b"refs/tags/ambig3", parse_ref(r, b"ambig3")) def test_ambiguous_head(self): r = { b"refs/heads/ambig4": "bla", b"refs/remotes/ambig4": "bla", b"refs/remotes/ambig4/HEAD": "bla", } self.assertEqual(b"refs/heads/ambig4", parse_ref(r, b"ambig4")) def test_ambiguous_remote(self): r = {b"refs/remotes/ambig5": "bla", b"refs/remotes/ambig5/HEAD": "bla"} self.assertEqual(b"refs/remotes/ambig5", parse_ref(r, b"ambig5")) def test_ambiguous_remote_head(self): r = {b"refs/remotes/ambig6/HEAD": "bla"} self.assertEqual(b"refs/remotes/ambig6/HEAD", parse_ref(r, b"ambig6")) def test_heads_full(self): r = {b"refs/heads/foo": "bla"} self.assertEqual(b"refs/heads/foo", parse_ref(r, b"refs/heads/foo")) def test_heads_partial(self): r = {b"refs/heads/foo": "bla"} self.assertEqual(b"refs/heads/foo", parse_ref(r, b"heads/foo")) def test_tags_partial(self): r = {b"refs/tags/foo": "bla"} self.assertEqual(b"refs/tags/foo", parse_ref(r, b"tags/foo")) class ParseRefsTests(TestCase): def test_nonexistent(self): r = {} self.assertRaises(KeyError, parse_refs, r, [b"thisdoesnotexist"]) def test_head(self): r = {b"refs/heads/foo": "bla"} self.assertEqual([b"refs/heads/foo"], parse_refs(r, [b"foo"])) def test_full(self): r = {b"refs/heads/foo": "bla"} self.assertEqual([b"refs/heads/foo"], parse_refs(r, b"refs/heads/foo")) class ParseReftupleTests(TestCase): def test_nonexistent(self): r = {} self.assertRaises(KeyError, parse_reftuple, r, r, b"thisdoesnotexist") def test_head(self): r = {b"refs/heads/foo": "bla"} self.assertEqual( - (b"refs/heads/foo", b"refs/heads/foo", False), parse_reftuple(r, r, b"foo") + (b"refs/heads/foo", b"refs/heads/foo", False), + parse_reftuple(r, r, b"foo"), ) self.assertEqual( - (b"refs/heads/foo", b"refs/heads/foo", True), parse_reftuple(r, r, b"+foo") + (b"refs/heads/foo", b"refs/heads/foo", True), + parse_reftuple(r, r, b"+foo"), ) self.assertEqual( - (b"refs/heads/foo", b"refs/heads/foo", True), parse_reftuple(r, {}, b"+foo") + (b"refs/heads/foo", b"refs/heads/foo", True), + parse_reftuple(r, {}, b"+foo"), ) self.assertEqual( (b"refs/heads/foo", b"refs/heads/foo", True), parse_reftuple(r, {}, b"foo", True), ) def test_full(self): r = {b"refs/heads/foo": "bla"} self.assertEqual( (b"refs/heads/foo", b"refs/heads/foo", False), parse_reftuple(r, r, b"refs/heads/foo"), ) def test_no_left_ref(self): r = {b"refs/heads/foo": "bla"} self.assertEqual( - (None, b"refs/heads/foo", False), parse_reftuple(r, r, b":refs/heads/foo") + (None, b"refs/heads/foo", False), + parse_reftuple(r, r, b":refs/heads/foo"), ) def test_no_right_ref(self): r = {b"refs/heads/foo": "bla"} self.assertEqual( - (b"refs/heads/foo", None, False), parse_reftuple(r, r, b"refs/heads/foo:") + (b"refs/heads/foo", None, False), + parse_reftuple(r, r, b"refs/heads/foo:"), ) def test_default_with_string(self): r = {b"refs/heads/foo": "bla"} self.assertEqual( - (b"refs/heads/foo", b"refs/heads/foo", False), parse_reftuple(r, r, "foo") + (b"refs/heads/foo", b"refs/heads/foo", False), + parse_reftuple(r, r, "foo"), ) class ParseReftuplesTests(TestCase): def test_nonexistent(self): r = {} self.assertRaises(KeyError, parse_reftuples, r, r, [b"thisdoesnotexist"]) def test_head(self): r = {b"refs/heads/foo": "bla"} self.assertEqual( [(b"refs/heads/foo", b"refs/heads/foo", False)], parse_reftuples(r, r, [b"foo"]), ) def test_full(self): r = {b"refs/heads/foo": "bla"} self.assertEqual( [(b"refs/heads/foo", b"refs/heads/foo", False)], parse_reftuples(r, r, b"refs/heads/foo"), ) r = {b"refs/heads/foo": "bla"} self.assertEqual( 
[(b"refs/heads/foo", b"refs/heads/foo", True)], parse_reftuples(r, r, b"refs/heads/foo", True), ) class ParseTreeTests(TestCase): """Test parse_tree.""" def test_nonexistent(self): r = MemoryRepo() self.assertRaises(KeyError, parse_tree, r, "thisdoesnotexist") def test_from_commit(self): r = MemoryRepo() c1, c2, c3 = build_commit_graph(r.object_store, [[1], [2, 1], [3, 1, 2]]) self.assertEqual(r[c1.tree], parse_tree(r, c1.id)) self.assertEqual(r[c1.tree], parse_tree(r, c1.tree)) diff --git a/dulwich/tests/test_pack.py b/dulwich/tests/test_pack.py index 6c9f5876..54f3ebdc 100644 --- a/dulwich/tests/test_pack.py +++ b/dulwich/tests/test_pack.py @@ -1,1220 +1,1236 @@ # test_pack.py -- Tests for the handling of git packs. # Copyright (C) 2007 James Westby # Copyright (C) 2008 Jelmer Vernooij # # Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU # General Public License as public by the Free Software Foundation; version 2.0 # or (at your option) any later version. You can redistribute it and/or # modify it under the terms of either of these two licenses. # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # You should have received a copy of the licenses; if not, see # for a copy of the GNU General Public License # and for a copy of the Apache # License, Version 2.0. # """Tests for Dulwich packs.""" from io import BytesIO from hashlib import sha1 import os import shutil import tempfile import zlib from dulwich.errors import ( ApplyDeltaError, ChecksumMismatch, ) from dulwich.file import ( GitFile, ) from dulwich.object_store import ( MemoryObjectStore, ) from dulwich.objects import ( hex_to_sha, sha_to_hex, Commit, Tree, Blob, ) from dulwich.pack import ( OFS_DELTA, REF_DELTA, MemoryPackIndex, Pack, PackData, apply_delta, create_delta, deltify_pack_objects, load_pack_index, UnpackedObject, read_zlib_chunks, write_pack_header, write_pack_index_v1, write_pack_index_v2, write_pack_object, write_pack, unpack_object, compute_file_sha, PackStreamReader, DeltaChainIterator, _delta_encode_size, _encode_copy_operation, ) from dulwich.tests import ( TestCase, ) from dulwich.tests.utils import ( make_object, build_pack, ) pack1_sha = b"bc63ddad95e7321ee734ea11a7a62d314e0d7481" a_sha = b"6f670c0fb53f9463760b7295fbb814e965fb20c8" tree_sha = b"b2a2766a2879c209ab1176e7e778b81ae422eeaa" commit_sha = b"f18faa16531ac570a3fdc8c7ca16682548dafd12" class PackTests(TestCase): """Base class for testing packs""" def setUp(self): super(PackTests, self).setUp() self.tempdir = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, self.tempdir) datadir = os.path.abspath(os.path.join(os.path.dirname(__file__), "data/packs")) def get_pack_index(self, sha): """Returns a PackIndex from the datadir with the given sha""" return load_pack_index( os.path.join(self.datadir, "pack-%s.idx" % sha.decode("ascii")) ) def get_pack_data(self, sha): """Returns a PackData object from the datadir with the given sha""" return PackData( os.path.join(self.datadir, "pack-%s.pack" % sha.decode("ascii")) ) def get_pack(self, sha): return Pack(os.path.join(self.datadir, "pack-%s" % sha.decode("ascii"))) def assertSucceeds(self, func, *args, **kwargs): try: func(*args, **kwargs) except ChecksumMismatch as e: self.fail(e) class PackIndexTests(PackTests): """Class 
that tests the index of packfiles""" def test_object_index(self): """Tests that the correct object offset is returned from the index.""" p = self.get_pack_index(pack1_sha) self.assertRaises(KeyError, p.object_index, pack1_sha) self.assertEqual(p.object_index(a_sha), 178) self.assertEqual(p.object_index(tree_sha), 138) self.assertEqual(p.object_index(commit_sha), 12) def test_object_sha1(self): """Tests that the correct object offset is returned from the index.""" p = self.get_pack_index(pack1_sha) self.assertRaises(KeyError, p.object_sha1, 876) self.assertEqual(p.object_sha1(178), hex_to_sha(a_sha)) self.assertEqual(p.object_sha1(138), hex_to_sha(tree_sha)) self.assertEqual(p.object_sha1(12), hex_to_sha(commit_sha)) def test_index_len(self): p = self.get_pack_index(pack1_sha) self.assertEqual(3, len(p)) def test_get_stored_checksum(self): p = self.get_pack_index(pack1_sha) self.assertEqual( b"f2848e2ad16f329ae1c92e3b95e91888daa5bd01", sha_to_hex(p.get_stored_checksum()), ) self.assertEqual( b"721980e866af9a5f93ad674144e1459b8ba3e7b7", sha_to_hex(p.get_pack_checksum()), ) def test_index_check(self): p = self.get_pack_index(pack1_sha) self.assertSucceeds(p.check) def test_iterentries(self): p = self.get_pack_index(pack1_sha) entries = [(sha_to_hex(s), o, c) for s, o, c in p.iterentries()] self.assertEqual( [ (b"6f670c0fb53f9463760b7295fbb814e965fb20c8", 178, None), (b"b2a2766a2879c209ab1176e7e778b81ae422eeaa", 138, None), (b"f18faa16531ac570a3fdc8c7ca16682548dafd12", 12, None), ], entries, ) def test_iter(self): p = self.get_pack_index(pack1_sha) self.assertEqual(set([tree_sha, commit_sha, a_sha]), set(p)) class TestPackDeltas(TestCase): test_string1 = b"The answer was flailing in the wind" test_string2 = b"The answer was falling down the pipe" test_string3 = b"zzzzz" test_string_empty = b"" test_string_big = b"Z" * 8192 test_string_huge = b"Z" * 100000 def _test_roundtrip(self, base, target): self.assertEqual( target, b"".join(apply_delta(base, create_delta(base, target))) ) def test_nochange(self): self._test_roundtrip(self.test_string1, self.test_string1) def test_nochange_huge(self): self._test_roundtrip(self.test_string_huge, self.test_string_huge) def test_change(self): self._test_roundtrip(self.test_string1, self.test_string2) def test_rewrite(self): self._test_roundtrip(self.test_string1, self.test_string3) def test_empty_to_big(self): self._test_roundtrip(self.test_string_empty, self.test_string_big) def test_empty_to_huge(self): self._test_roundtrip(self.test_string_empty, self.test_string_huge) def test_huge_copy(self): self._test_roundtrip( self.test_string_huge + self.test_string1, self.test_string_huge + self.test_string2, ) def test_dest_overflow(self): self.assertRaises( ApplyDeltaError, apply_delta, b"a" * 0x10000, b"\x80\x80\x04\x80\x80\x04\x80" + b"a" * 0x10000, ) self.assertRaises( ApplyDeltaError, apply_delta, b"", b"\x00\x80\x02\xb0\x11\x11" ) def test_pypy_issue(self): # Test for https://github.com/jelmer/dulwich/issues/509 / # https://bitbucket.org/pypy/pypy/issues/2499/cpyext-pystring_asstring-doesnt-work chunks = [ b"tree 03207ccf58880a748188836155ceed72f03d65d6\n" b"parent 408fbab530fd4abe49249a636a10f10f44d07a21\n" b"author Victor Stinner " b"1421355207 +0100\n" b"committer Victor Stinner " b"1421355207 +0100\n" b"\n" b"Backout changeset 3a06020af8cf\n" b"\nStreamWriter: close() now clears the reference to the " b"transport\n" b"\nStreamWriter now raises an exception if it is closed: " b"write(), writelines(),\n" b"write_eof(), can_write_eof(), get_extra_info(), 
drain().\n" ] delta = [ b"\xcd\x03\xad\x03]tree ff3c181a393d5a7270cddc01ea863818a8621ca8\n" b"parent 20a103cc90135494162e819f98d0edfc1f1fba6b\x91]7\x0510738" b"\x91\x99@\x0b10738 +0100\x93\x04\x01\xc9" ] res = apply_delta(chunks, delta) expected = [ b"tree ff3c181a393d5a7270cddc01ea863818a8621ca8\n" b"parent 20a103cc90135494162e819f98d0edfc1f1fba6b", b"\nauthor Victor Stinner 14213", b"10738", b" +0100\ncommitter Victor Stinner " b"14213", b"10738 +0100", b"\n\nStreamWriter: close() now clears the reference to the " b"transport\n\n" b"StreamWriter now raises an exception if it is closed: " b"write(), writelines(),\n" b"write_eof(), can_write_eof(), get_extra_info(), drain().\n", ] self.assertEqual(b"".join(expected), b"".join(res)) class TestPackData(PackTests): """Tests getting the data from the packfile.""" def test_create_pack(self): self.get_pack_data(pack1_sha).close() def test_from_file(self): path = os.path.join(self.datadir, "pack-%s.pack" % pack1_sha.decode("ascii")) with open(path, "rb") as f: PackData.from_file(f, os.path.getsize(path)) def test_pack_len(self): with self.get_pack_data(pack1_sha) as p: self.assertEqual(3, len(p)) def test_index_check(self): with self.get_pack_data(pack1_sha) as p: self.assertSucceeds(p.check) def test_iterobjects(self): with self.get_pack_data(pack1_sha) as p: commit_data = ( b"tree b2a2766a2879c209ab1176e7e778b81ae422eeaa\n" b"author James Westby " b"1174945067 +0100\n" b"committer James Westby " b"1174945067 +0100\n" b"\n" b"Test commit\n" ) blob_sha = b"6f670c0fb53f9463760b7295fbb814e965fb20c8" tree_data = b"100644 a\0" + hex_to_sha(blob_sha) actual = [] for offset, type_num, chunks, crc32 in p.iterobjects(): actual.append((offset, type_num, b"".join(chunks), crc32)) self.assertEqual( [ (12, 1, commit_data, 3775879613), (138, 2, tree_data, 912998690), (178, 3, b"test 1\n", 1373561701), ], actual, ) def test_iterentries(self): with self.get_pack_data(pack1_sha) as p: entries = set((sha_to_hex(s), o, c) for s, o, c in p.iterentries()) self.assertEqual( set( [ - (b"6f670c0fb53f9463760b7295fbb814e965fb20c8", 178, 1373561701), - (b"b2a2766a2879c209ab1176e7e778b81ae422eeaa", 138, 912998690), - (b"f18faa16531ac570a3fdc8c7ca16682548dafd12", 12, 3775879613), + ( + b"6f670c0fb53f9463760b7295fbb814e965fb20c8", + 178, + 1373561701, + ), + ( + b"b2a2766a2879c209ab1176e7e778b81ae422eeaa", + 138, + 912998690, + ), + ( + b"f18faa16531ac570a3fdc8c7ca16682548dafd12", + 12, + 3775879613, + ), ] ), entries, ) def test_create_index_v1(self): with self.get_pack_data(pack1_sha) as p: filename = os.path.join(self.tempdir, "v1test.idx") p.create_index_v1(filename) idx1 = load_pack_index(filename) idx2 = self.get_pack_index(pack1_sha) self.assertEqual(idx1, idx2) def test_create_index_v2(self): with self.get_pack_data(pack1_sha) as p: filename = os.path.join(self.tempdir, "v2test.idx") p.create_index_v2(filename) idx1 = load_pack_index(filename) idx2 = self.get_pack_index(pack1_sha) self.assertEqual(idx1, idx2) def test_compute_file_sha(self): f = BytesIO(b"abcd1234wxyz") self.assertEqual( sha1(b"abcd1234wxyz").hexdigest(), compute_file_sha(f).hexdigest() ) self.assertEqual( sha1(b"abcd1234wxyz").hexdigest(), compute_file_sha(f, buffer_size=5).hexdigest(), ) self.assertEqual( - sha1(b"abcd1234").hexdigest(), compute_file_sha(f, end_ofs=-4).hexdigest() + sha1(b"abcd1234").hexdigest(), + compute_file_sha(f, end_ofs=-4).hexdigest(), ) self.assertEqual( - sha1(b"1234wxyz").hexdigest(), compute_file_sha(f, start_ofs=4).hexdigest() + sha1(b"1234wxyz").hexdigest(), + 
compute_file_sha(f, start_ofs=4).hexdigest(), ) self.assertEqual( sha1(b"1234").hexdigest(), compute_file_sha(f, start_ofs=4, end_ofs=-4).hexdigest(), ) def test_compute_file_sha_short_file(self): f = BytesIO(b"abcd1234wxyz") self.assertRaises(AssertionError, compute_file_sha, f, end_ofs=-20) self.assertRaises(AssertionError, compute_file_sha, f, end_ofs=20) self.assertRaises( AssertionError, compute_file_sha, f, start_ofs=10, end_ofs=-12 ) class TestPack(PackTests): def test_len(self): with self.get_pack(pack1_sha) as p: self.assertEqual(3, len(p)) def test_contains(self): with self.get_pack(pack1_sha) as p: self.assertTrue(tree_sha in p) def test_get(self): with self.get_pack(pack1_sha) as p: self.assertEqual(type(p[tree_sha]), Tree) def test_iter(self): with self.get_pack(pack1_sha) as p: self.assertEqual(set([tree_sha, commit_sha, a_sha]), set(p)) def test_iterobjects(self): with self.get_pack(pack1_sha) as p: expected = set([p[s] for s in [commit_sha, tree_sha, a_sha]]) self.assertEqual(expected, set(list(p.iterobjects()))) def test_pack_tuples(self): with self.get_pack(pack1_sha) as p: tuples = p.pack_tuples() expected = set([(p[s], None) for s in [commit_sha, tree_sha, a_sha]]) self.assertEqual(expected, set(list(tuples))) self.assertEqual(expected, set(list(tuples))) self.assertEqual(3, len(tuples)) def test_get_object_at(self): """Tests random access for non-delta objects""" with self.get_pack(pack1_sha) as p: obj = p[a_sha] self.assertEqual(obj.type_name, b"blob") self.assertEqual(obj.sha().hexdigest().encode("ascii"), a_sha) obj = p[tree_sha] self.assertEqual(obj.type_name, b"tree") self.assertEqual(obj.sha().hexdigest().encode("ascii"), tree_sha) obj = p[commit_sha] self.assertEqual(obj.type_name, b"commit") self.assertEqual(obj.sha().hexdigest().encode("ascii"), commit_sha) def test_copy(self): with self.get_pack(pack1_sha) as origpack: self.assertSucceeds(origpack.index.check) basename = os.path.join(self.tempdir, "Elch") write_pack(basename, origpack.pack_tuples()) with Pack(basename) as newpack: self.assertEqual(origpack, newpack) self.assertSucceeds(newpack.index.check) self.assertEqual(origpack.name(), newpack.name()) self.assertEqual( origpack.index.get_pack_checksum(), newpack.index.get_pack_checksum(), ) wrong_version = origpack.index.version != newpack.index.version orig_checksum = origpack.index.get_stored_checksum() new_checksum = newpack.index.get_stored_checksum() self.assertTrue(wrong_version or orig_checksum == new_checksum) def test_commit_obj(self): with self.get_pack(pack1_sha) as p: commit = p[commit_sha] self.assertEqual(b"James Westby ", commit.author) self.assertEqual([], commit.parents) def _copy_pack(self, origpack): basename = os.path.join(self.tempdir, "somepack") write_pack(basename, origpack.pack_tuples()) return Pack(basename) def test_keep_no_message(self): with self.get_pack(pack1_sha) as p: p = self._copy_pack(p) with p: keepfile_name = p.keep() # file should exist self.assertTrue(os.path.exists(keepfile_name)) with open(keepfile_name, "r") as f: buf = f.read() self.assertEqual("", buf) def test_keep_message(self): with self.get_pack(pack1_sha) as p: p = self._copy_pack(p) msg = b"some message" with p: keepfile_name = p.keep(msg) # file should exist self.assertTrue(os.path.exists(keepfile_name)) # and contain the right message, with a linefeed with open(keepfile_name, "rb") as f: buf = f.read() self.assertEqual(msg + b"\n", buf) def test_name(self): with self.get_pack(pack1_sha) as p: self.assertEqual(pack1_sha, p.name()) def 
test_length_mismatch(self): with self.get_pack_data(pack1_sha) as data: index = self.get_pack_index(pack1_sha) Pack.from_objects(data, index).check_length_and_checksum() data._file.seek(12) bad_file = BytesIO() write_pack_header(bad_file, 9999) bad_file.write(data._file.read()) bad_file = BytesIO(bad_file.getvalue()) bad_data = PackData("", file=bad_file) bad_pack = Pack.from_lazy_objects(lambda: bad_data, lambda: index) self.assertRaises(AssertionError, lambda: bad_pack.data) self.assertRaises( AssertionError, lambda: bad_pack.check_length_and_checksum() ) def test_checksum_mismatch(self): with self.get_pack_data(pack1_sha) as data: index = self.get_pack_index(pack1_sha) Pack.from_objects(data, index).check_length_and_checksum() data._file.seek(0) bad_file = BytesIO(data._file.read()[:-20] + (b"\xff" * 20)) bad_data = PackData("", file=bad_file) bad_pack = Pack.from_lazy_objects(lambda: bad_data, lambda: index) self.assertRaises(ChecksumMismatch, lambda: bad_pack.data) self.assertRaises( ChecksumMismatch, lambda: bad_pack.check_length_and_checksum() ) def test_iterobjects_2(self): with self.get_pack(pack1_sha) as p: objs = dict((o.id, o) for o in p.iterobjects()) self.assertEqual(3, len(objs)) self.assertEqual(sorted(objs), sorted(p.index)) self.assertTrue(isinstance(objs[a_sha], Blob)) self.assertTrue(isinstance(objs[tree_sha], Tree)) self.assertTrue(isinstance(objs[commit_sha], Commit)) class TestThinPack(PackTests): def setUp(self): super(TestThinPack, self).setUp() self.store = MemoryObjectStore() self.blobs = {} for blob in (b"foo", b"bar", b"foo1234", b"bar2468"): self.blobs[blob] = make_object(Blob, data=blob) self.store.add_object(self.blobs[b"foo"]) self.store.add_object(self.blobs[b"bar"]) # Build a thin pack. 'foo' is as an external reference, 'bar' an # internal reference. self.pack_dir = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, self.pack_dir) self.pack_prefix = os.path.join(self.pack_dir, "pack") with open(self.pack_prefix + ".pack", "wb") as f: build_pack( f, [ (REF_DELTA, (self.blobs[b"foo"].id, b"foo1234")), (Blob.type_num, b"bar"), (REF_DELTA, (self.blobs[b"bar"].id, b"bar2468")), ], store=self.store, ) # Index the new pack. 
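# [Editor's note, not part of the original diff.] A "thin" pack stores
# some objects as REF_DELTAs whose bases are absent from the pack itself
# (here, the b"foo" blob), so it cannot be read standalone: Pack needs a
# resolve_ext_ref callback to fetch external bases, which these tests wire
# to the backing object store, roughly:
#
#     pack = Pack(pack_prefix, resolve_ext_ref=store.get_raw)
#
# With the callback omitted (make_pack(False) below), touching an object
# whose delta base is external raises KeyError, as test_get_raw asserts.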
with self.make_pack(True) as pack: with PackData(pack._data_path) as data: data.pack = pack data.create_index(self.pack_prefix + ".idx") del self.store[self.blobs[b"bar"].id] def make_pack(self, resolve_ext_ref): return Pack( self.pack_prefix, resolve_ext_ref=self.store.get_raw if resolve_ext_ref else None, ) def test_get_raw(self): with self.make_pack(False) as p: self.assertRaises(KeyError, p.get_raw, self.blobs[b"foo1234"].id) with self.make_pack(True) as p: self.assertEqual((3, b"foo1234"), p.get_raw(self.blobs[b"foo1234"].id)) def test_get_raw_unresolved(self): with self.make_pack(False) as p: self.assertEqual( ( 7, b"\x19\x10(\x15f=#\xf8\xb7ZG\xe7\xa0\x19e\xdc\xdc\x96F\x8c", [b"x\x9ccf\x9f\xc0\xccbhdl\x02\x00\x06f\x01l"], ), p.get_raw_unresolved(self.blobs[b"foo1234"].id), ) with self.make_pack(True) as p: self.assertEqual( ( 7, b"\x19\x10(\x15f=#\xf8\xb7ZG\xe7\xa0\x19e\xdc\xdc\x96F\x8c", [b"x\x9ccf\x9f\xc0\xccbhdl\x02\x00\x06f\x01l"], ), p.get_raw_unresolved(self.blobs[b"foo1234"].id), ) def test_iterobjects(self): with self.make_pack(False) as p: self.assertRaises(KeyError, list, p.iterobjects()) with self.make_pack(True) as p: self.assertEqual( sorted( [ self.blobs[b"foo1234"].id, self.blobs[b"bar"].id, self.blobs[b"bar2468"].id, ] ), sorted(o.id for o in p.iterobjects()), ) class WritePackTests(TestCase): def test_write_pack_header(self): f = BytesIO() write_pack_header(f, 42) self.assertEqual(b"PACK\x00\x00\x00\x02\x00\x00\x00*", f.getvalue()) def test_write_pack_object(self): f = BytesIO() f.write(b"header") offset = f.tell() crc32 = write_pack_object(f, Blob.type_num, b"blob") self.assertEqual(crc32, zlib.crc32(f.getvalue()[6:]) & 0xFFFFFFFF) f.write(b"x") # unpack_object needs extra trailing data. f.seek(offset) unpacked, unused = unpack_object(f.read, compute_crc32=True) self.assertEqual(Blob.type_num, unpacked.pack_type_num) self.assertEqual(Blob.type_num, unpacked.obj_type_num) self.assertEqual([b"blob"], unpacked.decomp_chunks) self.assertEqual(crc32, unpacked.crc32) self.assertEqual(b"x", unused) def test_write_pack_object_sha(self): f = BytesIO() f.write(b"header") offset = f.tell() sha_a = sha1(b"foo") sha_b = sha_a.copy() write_pack_object(f, Blob.type_num, b"blob", sha=sha_a) self.assertNotEqual(sha_a.digest(), sha_b.digest()) sha_b.update(f.getvalue()[offset:]) self.assertEqual(sha_a.digest(), sha_b.digest()) def test_write_pack_object_compression_level(self): f = BytesIO() f.write(b"header") offset = f.tell() sha_a = sha1(b"foo") sha_b = sha_a.copy() write_pack_object(f, Blob.type_num, b"blob", sha=sha_a, compression_level=6) self.assertNotEqual(sha_a.digest(), sha_b.digest()) sha_b.update(f.getvalue()[offset:]) self.assertEqual(sha_a.digest(), sha_b.digest()) pack_checksum = hex_to_sha("721980e866af9a5f93ad674144e1459b8ba3e7b7") class BaseTestPackIndexWriting(object): def assertSucceeds(self, func, *args, **kwargs): try: func(*args, **kwargs) except ChecksumMismatch as e: self.fail(e) def index(self, filename, entries, pack_checksum): raise NotImplementedError(self.index) def test_empty(self): idx = self.index("empty.idx", [], pack_checksum) self.assertEqual(idx.get_pack_checksum(), pack_checksum) self.assertEqual(0, len(idx)) def test_large(self): entry1_sha = hex_to_sha("4e6388232ec39792661e2e75db8fb117fc869ce6") entry2_sha = hex_to_sha("e98f071751bd77f59967bfa671cd2caebdccc9a2") entries = [ (entry1_sha, 0xF2972D0830529B87, 24), (entry2_sha, (~0xF2972D0830529B87) & (2 ** 64 - 1), 92), ] if not self._supports_large: self.assertRaises( TypeError, self.index, 
"single.idx", entries, pack_checksum ) return idx = self.index("single.idx", entries, pack_checksum) self.assertEqual(idx.get_pack_checksum(), pack_checksum) self.assertEqual(2, len(idx)) actual_entries = list(idx.iterentries()) self.assertEqual(len(entries), len(actual_entries)) for mine, actual in zip(entries, actual_entries): my_sha, my_offset, my_crc = mine actual_sha, actual_offset, actual_crc = actual self.assertEqual(my_sha, actual_sha) self.assertEqual(my_offset, actual_offset) if self._has_crc32_checksum: self.assertEqual(my_crc, actual_crc) else: self.assertTrue(actual_crc is None) def test_single(self): entry_sha = hex_to_sha("6f670c0fb53f9463760b7295fbb814e965fb20c8") my_entries = [(entry_sha, 178, 42)] idx = self.index("single.idx", my_entries, pack_checksum) self.assertEqual(idx.get_pack_checksum(), pack_checksum) self.assertEqual(1, len(idx)) actual_entries = list(idx.iterentries()) self.assertEqual(len(my_entries), len(actual_entries)) for mine, actual in zip(my_entries, actual_entries): my_sha, my_offset, my_crc = mine actual_sha, actual_offset, actual_crc = actual self.assertEqual(my_sha, actual_sha) self.assertEqual(my_offset, actual_offset) if self._has_crc32_checksum: self.assertEqual(my_crc, actual_crc) else: self.assertTrue(actual_crc is None) class BaseTestFilePackIndexWriting(BaseTestPackIndexWriting): def setUp(self): self.tempdir = tempfile.mkdtemp() def tearDown(self): shutil.rmtree(self.tempdir) def index(self, filename, entries, pack_checksum): path = os.path.join(self.tempdir, filename) self.writeIndex(path, entries, pack_checksum) idx = load_pack_index(path) self.assertSucceeds(idx.check) self.assertEqual(idx.version, self._expected_version) return idx def writeIndex(self, filename, entries, pack_checksum): # FIXME: Write to BytesIO instead rather than hitting disk ? with GitFile(filename, "wb") as f: self._write_fn(f, entries, pack_checksum) class TestMemoryIndexWriting(TestCase, BaseTestPackIndexWriting): def setUp(self): TestCase.setUp(self) self._has_crc32_checksum = True self._supports_large = True def index(self, filename, entries, pack_checksum): return MemoryPackIndex(entries, pack_checksum) def tearDown(self): TestCase.tearDown(self) class TestPackIndexWritingv1(TestCase, BaseTestFilePackIndexWriting): def setUp(self): TestCase.setUp(self) BaseTestFilePackIndexWriting.setUp(self) self._has_crc32_checksum = False self._expected_version = 1 self._supports_large = False self._write_fn = write_pack_index_v1 def tearDown(self): TestCase.tearDown(self) BaseTestFilePackIndexWriting.tearDown(self) class TestPackIndexWritingv2(TestCase, BaseTestFilePackIndexWriting): def setUp(self): TestCase.setUp(self) BaseTestFilePackIndexWriting.setUp(self) self._has_crc32_checksum = True self._supports_large = True self._expected_version = 2 self._write_fn = write_pack_index_v2 def tearDown(self): TestCase.tearDown(self) BaseTestFilePackIndexWriting.tearDown(self) class ReadZlibTests(TestCase): decomp = ( b"tree 4ada885c9196b6b6fa08744b5862bf92896fc002\n" b"parent None\n" b"author Jelmer Vernooij 1228980214 +0000\n" b"committer Jelmer Vernooij 1228980214 +0000\n" b"\n" b"Provide replacement for mmap()'s offset argument." 
) comp = zlib.compress(decomp) extra = b"nextobject" def setUp(self): super(ReadZlibTests, self).setUp() self.read = BytesIO(self.comp + self.extra).read self.unpacked = UnpackedObject(Tree.type_num, None, len(self.decomp), 0) def test_decompress_size(self): good_decomp_len = len(self.decomp) self.unpacked.decomp_len = -1 self.assertRaises(ValueError, read_zlib_chunks, self.read, self.unpacked) self.unpacked.decomp_len = good_decomp_len - 1 self.assertRaises(zlib.error, read_zlib_chunks, self.read, self.unpacked) self.unpacked.decomp_len = good_decomp_len + 1 self.assertRaises(zlib.error, read_zlib_chunks, self.read, self.unpacked) def test_decompress_truncated(self): read = BytesIO(self.comp[:10]).read self.assertRaises(zlib.error, read_zlib_chunks, read, self.unpacked) read = BytesIO(self.comp).read self.assertRaises(zlib.error, read_zlib_chunks, read, self.unpacked) def test_decompress_empty(self): unpacked = UnpackedObject(Tree.type_num, None, 0, None) comp = zlib.compress(b"") read = BytesIO(comp + self.extra).read unused = read_zlib_chunks(read, unpacked) self.assertEqual(b"", b"".join(unpacked.decomp_chunks)) self.assertNotEqual(b"", unused) self.assertEqual(self.extra, unused + read()) def test_decompress_no_crc32(self): self.unpacked.crc32 = None read_zlib_chunks(self.read, self.unpacked) self.assertEqual(None, self.unpacked.crc32) def _do_decompress_test(self, buffer_size, **kwargs): unused = read_zlib_chunks( self.read, self.unpacked, buffer_size=buffer_size, **kwargs ) self.assertEqual(self.decomp, b"".join(self.unpacked.decomp_chunks)) self.assertEqual(zlib.crc32(self.comp), self.unpacked.crc32) self.assertNotEqual(b"", unused) self.assertEqual(self.extra, unused + self.read()) def test_simple_decompress(self): self._do_decompress_test(4096) self.assertEqual(None, self.unpacked.comp_chunks) # These buffer sizes are not intended to be realistic, but rather simulate # larger buffer sizes that may end at various places. 
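# [Editor's sketch, not part of the original diff; a simplified standalone
# analogue of read_zlib_chunks assuming only the stdlib zlib API, without
# the real function's CRC32 tracking or decomp_len enforcement.] The
# buffer-size tests below only vary the read granularity; the decompressed
# output and the trailing "unused" bytes must come out the same:
import zlib

def inflate_sketch(read, buffer_size):
    decomp = zlib.decompressobj()
    chunks = []
    while True:
        data = read(buffer_size)
        chunks.append(decomp.decompress(data))
        if decomp.eof:
            # Whatever trails the zlib stream belongs to the next object.
            return b"".join(chunks), decomp.unused_data
        if not data:
            raise zlib.error("EOF before end of zlib stream")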
def test_decompress_buffer_size_1(self): self._do_decompress_test(1) def test_decompress_buffer_size_2(self): self._do_decompress_test(2) def test_decompress_buffer_size_3(self): self._do_decompress_test(3) def test_decompress_buffer_size_4(self): self._do_decompress_test(4) def test_decompress_include_comp(self): self._do_decompress_test(4096, include_comp=True) self.assertEqual(self.comp, b"".join(self.unpacked.comp_chunks)) class DeltifyTests(TestCase): def test_empty(self): self.assertEqual([], list(deltify_pack_objects([]))) def test_single(self): b = Blob.from_string(b"foo") self.assertEqual( [(b.type_num, b.sha().digest(), None, b.as_raw_string())], list(deltify_pack_objects([(b, b"")])), ) def test_simple_delta(self): b1 = Blob.from_string(b"a" * 101) b2 = Blob.from_string(b"a" * 100) delta = create_delta(b1.as_raw_string(), b2.as_raw_string()) self.assertEqual( [ (b1.type_num, b1.sha().digest(), None, b1.as_raw_string()), (b2.type_num, b2.sha().digest(), b1.sha().digest(), delta), ], list(deltify_pack_objects([(b1, b""), (b2, b"")])), ) class TestPackStreamReader(TestCase): def test_read_objects_emtpy(self): f = BytesIO() build_pack(f, []) reader = PackStreamReader(f.read) self.assertEqual(0, len(list(reader.read_objects()))) def test_read_objects(self): f = BytesIO() entries = build_pack( f, [ (Blob.type_num, b"blob"), (OFS_DELTA, (0, b"blob1")), ], ) reader = PackStreamReader(f.read) objects = list(reader.read_objects(compute_crc32=True)) self.assertEqual(2, len(objects)) unpacked_blob, unpacked_delta = objects self.assertEqual(entries[0][0], unpacked_blob.offset) self.assertEqual(Blob.type_num, unpacked_blob.pack_type_num) self.assertEqual(Blob.type_num, unpacked_blob.obj_type_num) self.assertEqual(None, unpacked_blob.delta_base) self.assertEqual(b"blob", b"".join(unpacked_blob.decomp_chunks)) self.assertEqual(entries[0][4], unpacked_blob.crc32) self.assertEqual(entries[1][0], unpacked_delta.offset) self.assertEqual(OFS_DELTA, unpacked_delta.pack_type_num) self.assertEqual(None, unpacked_delta.obj_type_num) self.assertEqual( - unpacked_delta.offset - unpacked_blob.offset, unpacked_delta.delta_base + unpacked_delta.offset - unpacked_blob.offset, + unpacked_delta.delta_base, ) delta = create_delta(b"blob", b"blob1") self.assertEqual(delta, b"".join(unpacked_delta.decomp_chunks)) self.assertEqual(entries[1][4], unpacked_delta.crc32) def test_read_objects_buffered(self): f = BytesIO() build_pack( f, [ (Blob.type_num, b"blob"), (OFS_DELTA, (0, b"blob1")), ], ) reader = PackStreamReader(f.read, zlib_bufsize=4) self.assertEqual(2, len(list(reader.read_objects()))) def test_read_objects_empty(self): reader = PackStreamReader(BytesIO().read) self.assertEqual([], list(reader.read_objects())) class TestPackIterator(DeltaChainIterator): _compute_crc32 = True def __init__(self, *args, **kwargs): super(TestPackIterator, self).__init__(*args, **kwargs) self._unpacked_offsets = set() def _result(self, unpacked): """Return entries in the same format as build_pack.""" return ( unpacked.offset, unpacked.obj_type_num, b"".join(unpacked.obj_chunks), unpacked.sha(), unpacked.crc32, ) def _resolve_object(self, offset, pack_type_num, base_chunks): assert offset not in self._unpacked_offsets, ( "Attempted to re-inflate offset %i" % offset ) self._unpacked_offsets.add(offset) return super(TestPackIterator, self)._resolve_object( offset, pack_type_num, base_chunks ) class DeltaChainIteratorTests(TestCase): def setUp(self): super(DeltaChainIteratorTests, self).setUp() self.store = MemoryObjectStore() 
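# [Editor's sketch, not part of the original diff.] The packs built below
# store objects as OFS_DELTAs (base named by a backwards byte distance in
# the same pack) or REF_DELTAs (base named by SHA, possibly outside the
# pack). Materialising an entry means walking down the chain to a full
# object and replaying each delta on the way back up; with a hypothetical
# lookup(base) returning the base's UnpackedObject, the idea is:
#
#     deltas = []
#     while unpacked.delta_base is not None:
#         deltas.append(unpacked.decomp_chunks)
#         unpacked = lookup(unpacked.delta_base)
#     chunks = unpacked.decomp_chunks
#     for delta in reversed(deltas):
#         chunks = apply_delta(chunks, delta)
#
# DeltaChainIterator amortises this by walking each chain once, so every
# offset is inflated exactly once: the invariant that _resolve_object's
# assertion above enforces.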
self.fetched = set() def store_blobs(self, blobs_data): blobs = [] for data in blobs_data: blob = make_object(Blob, data=data) blobs.append(blob) self.store.add_object(blob) return blobs def get_raw_no_repeat(self, bin_sha): """Wrapper around store.get_raw that doesn't allow repeat lookups.""" hex_sha = sha_to_hex(bin_sha) self.assertFalse( - hex_sha in self.fetched, "Attempted to re-fetch object %s" % hex_sha + hex_sha in self.fetched, + "Attempted to re-fetch object %s" % hex_sha, ) self.fetched.add(hex_sha) return self.store.get_raw(hex_sha) def make_pack_iter(self, f, thin=None): if thin is None: thin = bool(list(self.store)) resolve_ext_ref = thin and self.get_raw_no_repeat or None data = PackData("test.pack", file=f) return TestPackIterator.for_pack_data(data, resolve_ext_ref=resolve_ext_ref) def assertEntriesMatch(self, expected_indexes, entries, pack_iter): expected = [entries[i] for i in expected_indexes] self.assertEqual(expected, list(pack_iter._walk_all_chains())) def test_no_deltas(self): f = BytesIO() entries = build_pack( f, [ (Commit.type_num, b"commit"), (Blob.type_num, b"blob"), (Tree.type_num, b"tree"), ], ) self.assertEntriesMatch([0, 1, 2], entries, self.make_pack_iter(f)) def test_ofs_deltas(self): f = BytesIO() entries = build_pack( f, [ (Blob.type_num, b"blob"), (OFS_DELTA, (0, b"blob1")), (OFS_DELTA, (0, b"blob2")), ], ) self.assertEntriesMatch([0, 1, 2], entries, self.make_pack_iter(f)) def test_ofs_deltas_chain(self): f = BytesIO() entries = build_pack( f, [ (Blob.type_num, b"blob"), (OFS_DELTA, (0, b"blob1")), (OFS_DELTA, (1, b"blob2")), ], ) self.assertEntriesMatch([0, 1, 2], entries, self.make_pack_iter(f)) def test_ref_deltas(self): f = BytesIO() entries = build_pack( f, [ (REF_DELTA, (1, b"blob1")), (Blob.type_num, (b"blob")), (REF_DELTA, (1, b"blob2")), ], ) self.assertEntriesMatch([1, 0, 2], entries, self.make_pack_iter(f)) def test_ref_deltas_chain(self): f = BytesIO() entries = build_pack( f, [ (REF_DELTA, (2, b"blob1")), (Blob.type_num, (b"blob")), (REF_DELTA, (1, b"blob2")), ], ) self.assertEntriesMatch([1, 2, 0], entries, self.make_pack_iter(f)) def test_ofs_and_ref_deltas(self): # Deltas pending on this offset are popped before deltas depending on # this ref. 
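# [Editor's note, not part of the original diff.] In these build_pack
# specs, a plain (type_num, data) tuple is a full object, while
# (OFS_DELTA, (i, data)) and (REF_DELTA, (i, data)) delta against entry i
# of the same list; a REF_DELTA base may instead be the raw SHA of an
# object outside the pack. assertEntriesMatch then checks the inflation
# order _walk_all_chains produces: full objects first, then the deltas
# that depend on them.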
f = BytesIO() entries = build_pack( f, [ (REF_DELTA, (1, b"blob1")), (Blob.type_num, (b"blob")), (OFS_DELTA, (1, b"blob2")), ], ) self.assertEntriesMatch([1, 2, 0], entries, self.make_pack_iter(f)) def test_mixed_chain(self): f = BytesIO() entries = build_pack( f, [ (Blob.type_num, b"blob"), (REF_DELTA, (2, b"blob2")), (OFS_DELTA, (0, b"blob1")), (OFS_DELTA, (1, b"blob3")), (OFS_DELTA, (0, b"bob")), ], ) self.assertEntriesMatch([0, 2, 4, 1, 3], entries, self.make_pack_iter(f)) def test_long_chain(self): n = 100 objects_spec = [(Blob.type_num, b"blob")] for i in range(n): objects_spec.append((OFS_DELTA, (i, b"blob" + str(i).encode("ascii")))) f = BytesIO() entries = build_pack(f, objects_spec) self.assertEntriesMatch(range(n + 1), entries, self.make_pack_iter(f)) def test_branchy_chain(self): n = 100 objects_spec = [(Blob.type_num, b"blob")] for i in range(n): objects_spec.append((OFS_DELTA, (0, b"blob" + str(i).encode("ascii")))) f = BytesIO() entries = build_pack(f, objects_spec) self.assertEntriesMatch(range(n + 1), entries, self.make_pack_iter(f)) def test_ext_ref(self): (blob,) = self.store_blobs([b"blob"]) f = BytesIO() entries = build_pack(f, [(REF_DELTA, (blob.id, b"blob1"))], store=self.store) pack_iter = self.make_pack_iter(f) self.assertEntriesMatch([0], entries, pack_iter) self.assertEqual([hex_to_sha(blob.id)], pack_iter.ext_refs()) def test_ext_ref_chain(self): (blob,) = self.store_blobs([b"blob"]) f = BytesIO() entries = build_pack( f, [ (REF_DELTA, (1, b"blob2")), (REF_DELTA, (blob.id, b"blob1")), ], store=self.store, ) pack_iter = self.make_pack_iter(f) self.assertEntriesMatch([1, 0], entries, pack_iter) self.assertEqual([hex_to_sha(blob.id)], pack_iter.ext_refs()) def test_ext_ref_chain_degenerate(self): # Test a degenerate case where the sender is sending a REF_DELTA # object that expands to an object already in the repository. 
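# [Editor's sketch, not part of the original diff; it illustrates the
# DeltaEncodeSizeTests and EncodeCopyOperationTests further down.] A delta
# payload starts with the source and target sizes as little-endian
# base-128 varints, low 7 bits per byte with the high bit set while more
# bytes follow:
#
#     def delta_encode_size_sketch(size):
#         out = bytearray()
#         while True:
#             byte, size = size & 0x7F, size >> 7
#             out.append(byte | 0x80 if size else byte)
#             if not size:
#                 return bytes(out)  # e.g. 250 -> b"\xfa\x01"
#
# A copy operation packs its offset and length sparsely: the opcode byte
# sets 0x80 plus one flag bit per non-zero little-endian byte of the
# offset (bits 0-3) and length (bits 4-6), and only those bytes follow:
#
#     def encode_copy_operation_sketch(start, length):
#         op, tail = 0x80, bytearray()
#         for i in range(4):
#             if (start >> (8 * i)) & 0xFF:
#                 op |= 1 << i
#                 tail.append((start >> (8 * i)) & 0xFF)
#         for i in range(3):
#             if (length >> (8 * i)) & 0xFF:
#                 op |= 1 << (4 + i)
#                 tail.append((length >> (8 * i)) & 0xFF)
#         return bytes([op]) + bytes(tail)  # (1, 10) -> b"\x91\x01\x0a"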
(blob,) = self.store_blobs([b"blob"]) (blob2,) = self.store_blobs([b"blob2"]) assert blob.id < blob2.id f = BytesIO() entries = build_pack( f, [ (REF_DELTA, (blob.id, b"blob2")), (REF_DELTA, (0, b"blob3")), ], store=self.store, ) pack_iter = self.make_pack_iter(f) self.assertEntriesMatch([0, 1], entries, pack_iter) self.assertEqual([hex_to_sha(blob.id)], pack_iter.ext_refs()) def test_ext_ref_multiple_times(self): (blob,) = self.store_blobs([b"blob"]) f = BytesIO() entries = build_pack( f, [ (REF_DELTA, (blob.id, b"blob1")), (REF_DELTA, (blob.id, b"blob2")), ], store=self.store, ) pack_iter = self.make_pack_iter(f) self.assertEntriesMatch([0, 1], entries, pack_iter) self.assertEqual([hex_to_sha(blob.id)], pack_iter.ext_refs()) def test_multiple_ext_refs(self): b1, b2 = self.store_blobs([b"foo", b"bar"]) f = BytesIO() entries = build_pack( f, [ (REF_DELTA, (b1.id, b"foo1")), (REF_DELTA, (b2.id, b"bar2")), ], store=self.store, ) pack_iter = self.make_pack_iter(f) self.assertEntriesMatch([0, 1], entries, pack_iter) self.assertEqual([hex_to_sha(b1.id), hex_to_sha(b2.id)], pack_iter.ext_refs()) def test_bad_ext_ref_non_thin_pack(self): (blob,) = self.store_blobs([b"blob"]) f = BytesIO() build_pack(f, [(REF_DELTA, (blob.id, b"blob1"))], store=self.store) pack_iter = self.make_pack_iter(f, thin=False) try: list(pack_iter._walk_all_chains()) self.fail() except KeyError as e: self.assertEqual(([blob.id],), e.args) def test_bad_ext_ref_thin_pack(self): b1, b2, b3 = self.store_blobs([b"foo", b"bar", b"baz"]) f = BytesIO() build_pack( f, [ (REF_DELTA, (1, b"foo99")), (REF_DELTA, (b1.id, b"foo1")), (REF_DELTA, (b2.id, b"bar2")), (REF_DELTA, (b3.id, b"baz3")), ], store=self.store, ) del self.store[b2.id] del self.store[b3.id] pack_iter = self.make_pack_iter(f) try: list(pack_iter._walk_all_chains()) self.fail() except KeyError as e: self.assertEqual((sorted([b2.id, b3.id]),), (sorted(e.args[0]),)) class DeltaEncodeSizeTests(TestCase): def test_basic(self): self.assertEqual(b"\x00", _delta_encode_size(0)) self.assertEqual(b"\x01", _delta_encode_size(1)) self.assertEqual(b"\xfa\x01", _delta_encode_size(250)) self.assertEqual(b"\xe8\x07", _delta_encode_size(1000)) self.assertEqual(b"\xa0\x8d\x06", _delta_encode_size(100000)) class EncodeCopyOperationTests(TestCase): def test_basic(self): self.assertEqual(b"\x80", _encode_copy_operation(0, 0)) self.assertEqual(b"\x91\x01\x0a", _encode_copy_operation(1, 10)) self.assertEqual(b"\xb1\x64\xe8\x03", _encode_copy_operation(100, 1000)) self.assertEqual(b"\x93\xe8\x03\x01", _encode_copy_operation(1000, 1)) diff --git a/dulwich/tests/test_patch.py b/dulwich/tests/test_patch.py index 2a252cb0..a553a34a 100644 --- a/dulwich/tests/test_patch.py +++ b/dulwich/tests/test_patch.py @@ -1,627 +1,647 @@ # test_patch.py -- tests for patch.py # Copyright (C) 2010 Jelmer Vernooij # # Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU # General Public License as public by the Free Software Foundation; version 2.0 # or (at your option) any later version. You can redistribute it and/or # modify it under the terms of either of these two licenses. # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
# # You should have received a copy of the licenses; if not, see # for a copy of the GNU General Public License # and for a copy of the Apache # License, Version 2.0. # """Tests for patch.py.""" from io import BytesIO, StringIO from dulwich.objects import ( Blob, Commit, S_IFGITLINK, Tree, ) from dulwich.object_store import ( MemoryObjectStore, ) from dulwich.patch import ( get_summary, git_am_patch_split, write_blob_diff, write_commit_patch, write_object_diff, write_tree_diff, ) from dulwich.tests import ( SkipTest, TestCase, ) class WriteCommitPatchTests(TestCase): def test_simple_bytesio(self): f = BytesIO() c = Commit() c.committer = c.author = b"Jelmer " c.commit_time = c.author_time = 1271350201 c.commit_timezone = c.author_timezone = 0 c.message = b"This is the first line\nAnd this is the second line.\n" c.tree = Tree().id write_commit_patch(f, c, b"CONTENTS", (1, 1), version="custom") f.seek(0) lines = f.readlines() self.assertTrue( lines[0].startswith(b"From 0b0d34d1b5b596c928adc9a727a4b9e03d025298") ) self.assertEqual(lines[1], b"From: Jelmer \n") self.assertTrue(lines[2].startswith(b"Date: ")) self.assertEqual( [ b"Subject: [PATCH 1/1] This is the first line\n", b"And this is the second line.\n", b"\n", b"\n", b"---\n", ], lines[3:8], ) self.assertEqual([b"CONTENTS-- \n", b"custom\n"], lines[-2:]) if len(lines) >= 12: # diffstat may not be present self.assertEqual(lines[8], b" 0 files changed\n") class ReadGitAmPatch(TestCase): def test_extract_string(self): text = b"""\ From ff643aae102d8870cac88e8f007e70f58f3a7363 Mon Sep 17 00:00:00 2001 From: Jelmer Vernooij Date: Thu, 15 Apr 2010 15:40:28 +0200 Subject: [PATCH 1/2] Remove executable bit from prey.ico (triggers a warning). --- pixmaps/prey.ico | Bin 9662 -> 9662 bytes 1 files changed, 0 insertions(+), 0 deletions(-) mode change 100755 => 100644 pixmaps/prey.ico -- 1.7.0.4 """ # noqa: W291 c, diff, version = git_am_patch_split(StringIO(text.decode("utf-8")), "utf-8") self.assertEqual(b"Jelmer Vernooij ", c.committer) self.assertEqual(b"Jelmer Vernooij ", c.author) self.assertEqual( b"Remove executable bit from prey.ico " b"(triggers a warning).\n", c.message, ) self.assertEqual( b""" pixmaps/prey.ico | Bin 9662 -> 9662 bytes 1 files changed, 0 insertions(+), 0 deletions(-) mode change 100755 => 100644 pixmaps/prey.ico """, diff, ) self.assertEqual(b"1.7.0.4", version) def test_extract_bytes(self): text = b"""\ From ff643aae102d8870cac88e8f007e70f58f3a7363 Mon Sep 17 00:00:00 2001 From: Jelmer Vernooij Date: Thu, 15 Apr 2010 15:40:28 +0200 Subject: [PATCH 1/2] Remove executable bit from prey.ico (triggers a warning). --- pixmaps/prey.ico | Bin 9662 -> 9662 bytes 1 files changed, 0 insertions(+), 0 deletions(-) mode change 100755 => 100644 pixmaps/prey.ico -- 1.7.0.4 """ # noqa: W291 c, diff, version = git_am_patch_split(BytesIO(text)) self.assertEqual(b"Jelmer Vernooij ", c.committer) self.assertEqual(b"Jelmer Vernooij ", c.author) self.assertEqual( b"Remove executable bit from prey.ico " b"(triggers a warning).\n", c.message, ) self.assertEqual( b""" pixmaps/prey.ico | Bin 9662 -> 9662 bytes 1 files changed, 0 insertions(+), 0 deletions(-) mode change 100755 => 100644 pixmaps/prey.ico """, diff, ) self.assertEqual(b"1.7.0.4", version) def test_extract_spaces(self): text = b"""From ff643aae102d8870cac88e8f007e70f58f3a7363 Mon Sep 17 00:00:00 2001 From: Jelmer Vernooij Date: Thu, 15 Apr 2010 15:40:28 +0200 Subject: [Dulwich-users] [PATCH] Added unit tests for dulwich.object_store.tree_lookup_path. 
* dulwich/tests/test_object_store.py (TreeLookupPathTests): This test case contains a few tests that ensure the tree_lookup_path function works as expected. --- pixmaps/prey.ico | Bin 9662 -> 9662 bytes 1 files changed, 0 insertions(+), 0 deletions(-) mode change 100755 => 100644 pixmaps/prey.ico -- 1.7.0.4 """ # noqa: W291 c, diff, version = git_am_patch_split(BytesIO(text), "utf-8") self.assertEqual( b"""\ Added unit tests for dulwich.object_store.tree_lookup_path. * dulwich/tests/test_object_store.py (TreeLookupPathTests): This test case contains a few tests that ensure the tree_lookup_path function works as expected. """, c.message, ) def test_extract_pseudo_from_header(self): text = b"""From ff643aae102d8870cac88e8f007e70f58f3a7363 Mon Sep 17 00:00:00 2001 From: Jelmer Vernooij Date: Thu, 15 Apr 2010 15:40:28 +0200 Subject: [Dulwich-users] [PATCH] Added unit tests for dulwich.object_store.tree_lookup_path. From: Jelmer Vernooij * dulwich/tests/test_object_store.py (TreeLookupPathTests): This test case contains a few tests that ensure the tree_lookup_path function works as expected. --- pixmaps/prey.ico | Bin 9662 -> 9662 bytes 1 files changed, 0 insertions(+), 0 deletions(-) mode change 100755 => 100644 pixmaps/prey.ico -- 1.7.0.4 """ # noqa: W291 c, diff, version = git_am_patch_split(BytesIO(text), "utf-8") self.assertEqual(b"Jelmer Vernooij ", c.author) self.assertEqual( b"""\ Added unit tests for dulwich.object_store.tree_lookup_path. * dulwich/tests/test_object_store.py (TreeLookupPathTests): This test case contains a few tests that ensure the tree_lookup_path function works as expected. """, c.message, ) def test_extract_no_version_tail(self): text = b"""\ From ff643aae102d8870cac88e8f007e70f58f3a7363 Mon Sep 17 00:00:00 2001 From: Jelmer Vernooij Date: Thu, 15 Apr 2010 15:40:28 +0200 Subject: [Dulwich-users] [PATCH] Added unit tests for dulwich.object_store.tree_lookup_path. 
From: Jelmer Vernooij --- pixmaps/prey.ico | Bin 9662 -> 9662 bytes 1 files changed, 0 insertions(+), 0 deletions(-) mode change 100755 => 100644 pixmaps/prey.ico """ c, diff, version = git_am_patch_split(BytesIO(text), "utf-8") self.assertEqual(None, version) def test_extract_mercurial(self): raise SkipTest( "git_am_patch_split doesn't handle Mercurial patches " "properly yet" ) expected_diff = """\ diff --git a/dulwich/tests/test_patch.py b/dulwich/tests/test_patch.py --- a/dulwich/tests/test_patch.py +++ b/dulwich/tests/test_patch.py @@ -158,7 +158,7 @@ ''' c, diff, version = git_am_patch_split(BytesIO(text)) - self.assertIs(None, version) + self.assertEqual(None, version) class DiffTests(TestCase): """ # noqa: W291,W293 text = ( """\ From dulwich-users-bounces+jelmer=samba.org@lists.launchpad.net \ Mon Nov 29 00:58:18 2010 Date: Sun, 28 Nov 2010 17:57:27 -0600 From: Augie Fackler To: dulwich-users Subject: [Dulwich-users] [PATCH] test_patch: fix tests on Python 2.6 Content-Transfer-Encoding: 8bit Change-Id: I5e51313d4ae3a65c3f00c665002a7489121bb0d6 %s _______________________________________________ Mailing list: https://launchpad.net/~dulwich-users Post to : dulwich-users@lists.launchpad.net Unsubscribe : https://launchpad.net/~dulwich-users More help : https://help.launchpad.net/ListHelp """ % expected_diff ) # noqa: W291 c, diff, version = git_am_patch_split(BytesIO(text)) self.assertEqual(expected_diff, diff) self.assertEqual(None, version) class DiffTests(TestCase): """Tests for write_blob_diff and write_tree_diff.""" def test_blob_diff(self): f = BytesIO() write_blob_diff( f, (b"foo.txt", 0o644, Blob.from_string(b"old\nsame\n")), (b"bar.txt", 0o644, Blob.from_string(b"new\nsame\n")), ) self.assertEqual( [ b"diff --git a/foo.txt b/bar.txt", b"index 3b0f961..a116b51 644", b"--- a/foo.txt", b"+++ b/bar.txt", b"@@ -1,2 +1,2 @@", b"-old", b"+new", b" same", ], f.getvalue().splitlines(), ) def test_blob_add(self): f = BytesIO() write_blob_diff( - f, (None, None, None), (b"bar.txt", 0o644, Blob.from_string(b"new\nsame\n")) + f, + (None, None, None), + (b"bar.txt", 0o644, Blob.from_string(b"new\nsame\n")), ) self.assertEqual( [ b"diff --git a/bar.txt b/bar.txt", b"new file mode 644", b"index 0000000..a116b51", b"--- /dev/null", b"+++ b/bar.txt", b"@@ -0,0 +1,2 @@", b"+new", b"+same", ], f.getvalue().splitlines(), ) def test_blob_remove(self): f = BytesIO() write_blob_diff( - f, (b"bar.txt", 0o644, Blob.from_string(b"new\nsame\n")), (None, None, None) + f, + (b"bar.txt", 0o644, Blob.from_string(b"new\nsame\n")), + (None, None, None), ) self.assertEqual( [ b"diff --git a/bar.txt b/bar.txt", b"deleted file mode 644", b"index a116b51..0000000", b"--- a/bar.txt", b"+++ /dev/null", b"@@ -1,2 +0,0 @@", b"-new", b"-same", ], f.getvalue().splitlines(), ) def test_tree_diff(self): f = BytesIO() store = MemoryObjectStore() added = Blob.from_string(b"add\n") removed = Blob.from_string(b"removed\n") changed1 = Blob.from_string(b"unchanged\nremoved\n") changed2 = Blob.from_string(b"unchanged\nadded\n") unchanged = Blob.from_string(b"unchanged\n") tree1 = Tree() tree1.add(b"removed.txt", 0o644, removed.id) tree1.add(b"changed.txt", 0o644, changed1.id) tree1.add(b"unchanged.txt", 0o644, changed1.id) tree2 = Tree() tree2.add(b"added.txt", 0o644, added.id) tree2.add(b"changed.txt", 0o644, changed2.id) tree2.add(b"unchanged.txt", 0o644, changed1.id) store.add_objects( [ (o, None) - for o in [tree1, tree2, added, removed, changed1, changed2, unchanged] + for o in [ + tree1, + tree2, + added, + removed, + 
changed1, + changed2, + unchanged, + ] ] ) write_tree_diff(f, store, tree1.id, tree2.id) self.assertEqual( [ b"diff --git a/added.txt b/added.txt", b"new file mode 644", b"index 0000000..76d4bb8", b"--- /dev/null", b"+++ b/added.txt", b"@@ -0,0 +1 @@", b"+add", b"diff --git a/changed.txt b/changed.txt", b"index bf84e48..1be2436 644", b"--- a/changed.txt", b"+++ b/changed.txt", b"@@ -1,2 +1,2 @@", b" unchanged", b"-removed", b"+added", b"diff --git a/removed.txt b/removed.txt", b"deleted file mode 644", b"index 2c3f0b3..0000000", b"--- a/removed.txt", b"+++ /dev/null", b"@@ -1 +0,0 @@", b"-removed", ], f.getvalue().splitlines(), ) def test_tree_diff_submodule(self): f = BytesIO() store = MemoryObjectStore() tree1 = Tree() tree1.add( - b"asubmodule", S_IFGITLINK, b"06d0bdd9e2e20377b3180e4986b14c8549b393e4" + b"asubmodule", + S_IFGITLINK, + b"06d0bdd9e2e20377b3180e4986b14c8549b393e4", ) tree2 = Tree() tree2.add( - b"asubmodule", S_IFGITLINK, b"cc975646af69f279396d4d5e1379ac6af80ee637" + b"asubmodule", + S_IFGITLINK, + b"cc975646af69f279396d4d5e1379ac6af80ee637", ) store.add_objects([(o, None) for o in [tree1, tree2]]) write_tree_diff(f, store, tree1.id, tree2.id) self.assertEqual( [ b"diff --git a/asubmodule b/asubmodule", b"index 06d0bdd..cc97564 160000", b"--- a/asubmodule", b"+++ b/asubmodule", b"@@ -1 +1 @@", b"-Subproject commit 06d0bdd9e2e20377b3180e4986b14c8549b393e4", b"+Subproject commit cc975646af69f279396d4d5e1379ac6af80ee637", ], f.getvalue().splitlines(), ) def test_object_diff_blob(self): f = BytesIO() b1 = Blob.from_string(b"old\nsame\n") b2 = Blob.from_string(b"new\nsame\n") store = MemoryObjectStore() store.add_objects([(b1, None), (b2, None)]) write_object_diff( f, store, (b"foo.txt", 0o644, b1.id), (b"bar.txt", 0o644, b2.id) ) self.assertEqual( [ b"diff --git a/foo.txt b/bar.txt", b"index 3b0f961..a116b51 644", b"--- a/foo.txt", b"+++ b/bar.txt", b"@@ -1,2 +1,2 @@", b"-old", b"+new", b" same", ], f.getvalue().splitlines(), ) def test_object_diff_add_blob(self): f = BytesIO() store = MemoryObjectStore() b2 = Blob.from_string(b"new\nsame\n") store.add_object(b2) write_object_diff(f, store, (None, None, None), (b"bar.txt", 0o644, b2.id)) self.assertEqual( [ b"diff --git a/bar.txt b/bar.txt", b"new file mode 644", b"index 0000000..a116b51", b"--- /dev/null", b"+++ b/bar.txt", b"@@ -0,0 +1,2 @@", b"+new", b"+same", ], f.getvalue().splitlines(), ) def test_object_diff_remove_blob(self): f = BytesIO() b1 = Blob.from_string(b"new\nsame\n") store = MemoryObjectStore() store.add_object(b1) write_object_diff(f, store, (b"bar.txt", 0o644, b1.id), (None, None, None)) self.assertEqual( [ b"diff --git a/bar.txt b/bar.txt", b"deleted file mode 644", b"index a116b51..0000000", b"--- a/bar.txt", b"+++ /dev/null", b"@@ -1,2 +0,0 @@", b"-new", b"-same", ], f.getvalue().splitlines(), ) def test_object_diff_bin_blob_force(self): f = BytesIO() # Prepare two slightly different PNG headers b1 = Blob.from_string( b"\x89\x50\x4e\x47\x0d\x0a\x1a\x0a" b"\x00\x00\x00\x0d\x49\x48\x44\x52" b"\x00\x00\x01\xd5\x00\x00\x00\x9f" b"\x08\x04\x00\x00\x00\x05\x04\x8b" ) b2 = Blob.from_string( b"\x89\x50\x4e\x47\x0d\x0a\x1a\x0a" b"\x00\x00\x00\x0d\x49\x48\x44\x52" b"\x00\x00\x01\xd5\x00\x00\x00\x9f" b"\x08\x03\x00\x00\x00\x98\xd3\xb3" ) store = MemoryObjectStore() store.add_objects([(b1, None), (b2, None)]) write_object_diff( f, store, (b"foo.png", 0o644, b1.id), (b"bar.png", 0o644, b2.id), diff_binary=True, ) self.assertEqual( [ b"diff --git a/foo.png b/bar.png", b"index f73e47d..06364b7 644", b"--- a/foo.png", 
b"+++ b/bar.png", b"@@ -1,4 +1,4 @@", b" \x89PNG", b" \x1a", b" \x00\x00\x00", b"-IHDR\x00\x00\x01\xd5\x00\x00\x00" b"\x9f\x08\x04\x00\x00\x00\x05\x04\x8b", b"\\ No newline at end of file", b"+IHDR\x00\x00\x01\xd5\x00\x00\x00\x9f" b"\x08\x03\x00\x00\x00\x98\xd3\xb3", b"\\ No newline at end of file", ], f.getvalue().splitlines(), ) def test_object_diff_bin_blob(self): f = BytesIO() # Prepare two slightly different PNG headers b1 = Blob.from_string( b"\x89\x50\x4e\x47\x0d\x0a\x1a\x0a" b"\x00\x00\x00\x0d\x49\x48\x44\x52" b"\x00\x00\x01\xd5\x00\x00\x00\x9f" b"\x08\x04\x00\x00\x00\x05\x04\x8b" ) b2 = Blob.from_string( b"\x89\x50\x4e\x47\x0d\x0a\x1a\x0a" b"\x00\x00\x00\x0d\x49\x48\x44\x52" b"\x00\x00\x01\xd5\x00\x00\x00\x9f" b"\x08\x03\x00\x00\x00\x98\xd3\xb3" ) store = MemoryObjectStore() store.add_objects([(b1, None), (b2, None)]) write_object_diff( f, store, (b"foo.png", 0o644, b1.id), (b"bar.png", 0o644, b2.id) ) self.assertEqual( [ b"diff --git a/foo.png b/bar.png", b"index f73e47d..06364b7 644", b"Binary files a/foo.png and b/bar.png differ", ], f.getvalue().splitlines(), ) def test_object_diff_add_bin_blob(self): f = BytesIO() b2 = Blob.from_string( b"\x89\x50\x4e\x47\x0d\x0a\x1a\x0a" b"\x00\x00\x00\x0d\x49\x48\x44\x52" b"\x00\x00\x01\xd5\x00\x00\x00\x9f" b"\x08\x03\x00\x00\x00\x98\xd3\xb3" ) store = MemoryObjectStore() store.add_object(b2) write_object_diff(f, store, (None, None, None), (b"bar.png", 0o644, b2.id)) self.assertEqual( [ b"diff --git a/bar.png b/bar.png", b"new file mode 644", b"index 0000000..06364b7", b"Binary files /dev/null and b/bar.png differ", ], f.getvalue().splitlines(), ) def test_object_diff_remove_bin_blob(self): f = BytesIO() b1 = Blob.from_string( b"\x89\x50\x4e\x47\x0d\x0a\x1a\x0a" b"\x00\x00\x00\x0d\x49\x48\x44\x52" b"\x00\x00\x01\xd5\x00\x00\x00\x9f" b"\x08\x04\x00\x00\x00\x05\x04\x8b" ) store = MemoryObjectStore() store.add_object(b1) write_object_diff(f, store, (b"foo.png", 0o644, b1.id), (None, None, None)) self.assertEqual( [ b"diff --git a/foo.png b/foo.png", b"deleted file mode 644", b"index f73e47d..0000000", b"Binary files a/foo.png and /dev/null differ", ], f.getvalue().splitlines(), ) def test_object_diff_kind_change(self): f = BytesIO() b1 = Blob.from_string(b"new\nsame\n") store = MemoryObjectStore() store.add_object(b1) write_object_diff( f, store, (b"bar.txt", 0o644, b1.id), - (b"bar.txt", 0o160000, b"06d0bdd9e2e20377b3180e4986b14c8549b393e4"), + ( + b"bar.txt", + 0o160000, + b"06d0bdd9e2e20377b3180e4986b14c8549b393e4", + ), ) self.assertEqual( [ b"diff --git a/bar.txt b/bar.txt", b"old file mode 644", b"new file mode 160000", b"index a116b51..06d0bdd 160000", b"--- a/bar.txt", b"+++ b/bar.txt", b"@@ -1,2 +1 @@", b"-new", b"-same", b"+Subproject commit 06d0bdd9e2e20377b3180e4986b14c8549b393e4", ], f.getvalue().splitlines(), ) class GetSummaryTests(TestCase): def test_simple(self): c = Commit() c.committer = c.author = b"Jelmer " c.commit_time = c.author_time = 1271350201 c.commit_timezone = c.author_timezone = 0 c.message = b"This is the first line\nAnd this is the second line.\n" c.tree = Tree().id self.assertEqual("This-is-the-first-line", get_summary(c)) diff --git a/dulwich/tests/test_porcelain.py b/dulwich/tests/test_porcelain.py index f90dea99..aa8fe00f 100644 --- a/dulwich/tests/test_porcelain.py +++ b/dulwich/tests/test_porcelain.py @@ -1,2311 +1,2352 @@ # test_porcelain.py -- porcelain tests # Copyright (C) 2013 Jelmer Vernooij # # Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU # General Public License as 
public by the Free Software Foundation; version 2.0 # or (at your option) any later version. You can redistribute it and/or # modify it under the terms of either of these two licenses. # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # You should have received a copy of the licenses; if not, see # for a copy of the GNU General Public License # and for a copy of the Apache # License, Version 2.0. # """Tests for dulwich.porcelain.""" from io import BytesIO, StringIO import os import re import shutil import stat import tarfile import tempfile import time from dulwich import porcelain from dulwich.diff_tree import tree_changes from dulwich.errors import CommitError from dulwich.objects import ( Blob, Tag, Tree, ZERO_SHA, ) from dulwich.repo import ( NoIndexPresent, Repo, ) from dulwich.tests import ( TestCase, ) from dulwich.tests.utils import ( build_commit_graph, make_commit, make_object, ) def flat_walk_dir(dir_to_walk): for dirpath, _, filenames in os.walk(dir_to_walk): rel_dirpath = os.path.relpath(dirpath, dir_to_walk) if not dirpath == dir_to_walk: yield rel_dirpath for filename in filenames: if dirpath == dir_to_walk: yield filename else: yield os.path.join(rel_dirpath, filename) class PorcelainTestCase(TestCase): def setUp(self): super(PorcelainTestCase, self).setUp() self.test_dir = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, self.test_dir) self.repo_path = os.path.join(self.test_dir, "repo") self.repo = Repo.init(self.repo_path, mkdir=True) self.addCleanup(self.repo.close) class ArchiveTests(PorcelainTestCase): """Tests for the archive command.""" def test_simple(self): c1, c2, c3 = build_commit_graph( self.repo.object_store, [[1], [2, 1], [3, 1, 2]] ) self.repo.refs[b"refs/heads/master"] = c3.id out = BytesIO() err = BytesIO() porcelain.archive( self.repo.path, b"refs/heads/master", outstream=out, errstream=err ) self.assertEqual(b"", err.getvalue()) tf = tarfile.TarFile(fileobj=out) self.addCleanup(tf.close) self.assertEqual([], tf.getnames()) class UpdateServerInfoTests(PorcelainTestCase): def test_simple(self): c1, c2, c3 = build_commit_graph( self.repo.object_store, [[1], [2, 1], [3, 1, 2]] ) self.repo.refs[b"refs/heads/foo"] = c3.id porcelain.update_server_info(self.repo.path) self.assertTrue( os.path.exists(os.path.join(self.repo.controldir(), "info", "refs")) ) class CommitTests(PorcelainTestCase): def test_custom_author(self): c1, c2, c3 = build_commit_graph( self.repo.object_store, [[1], [2, 1], [3, 1, 2]] ) self.repo.refs[b"refs/heads/foo"] = c3.id sha = porcelain.commit( self.repo.path, message=b"Some message", author=b"Joe ", committer=b"Bob ", ) self.assertTrue(isinstance(sha, bytes)) self.assertEqual(len(sha), 40) def test_unicode(self): c1, c2, c3 = build_commit_graph( self.repo.object_store, [[1], [2, 1], [3, 1, 2]] ) self.repo.refs[b"refs/heads/foo"] = c3.id sha = porcelain.commit( self.repo.path, message="Some message", author="Joe ", committer="Bob ", ) self.assertTrue(isinstance(sha, bytes)) self.assertEqual(len(sha), 40) def test_no_verify(self): if os.name != "posix": self.skipTest("shell hook tests requires POSIX shell") self.assertTrue(os.path.exists("/bin/sh")) hooks_dir = os.path.join(self.repo.controldir(), "hooks") os.makedirs(hooks_dir, exist_ok=True) self.addCleanup(shutil.rmtree, 
hooks_dir) c1, c2, c3 = build_commit_graph( self.repo.object_store, [[1], [2, 1], [3, 1, 2]] ) hook_fail = "#!/bin/sh\nexit 1" # hooks are executed in pre-commit, commit-msg order # test commit-msg failure first, then pre-commit failure, then # no_verify to skip both hooks commit_msg = os.path.join(hooks_dir, "commit-msg") with open(commit_msg, "w") as f: f.write(hook_fail) os.chmod(commit_msg, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC) with self.assertRaises(CommitError): porcelain.commit( self.repo.path, message="Some message", author="Joe ", committer="Bob ", ) pre_commit = os.path.join(hooks_dir, "pre-commit") with open(pre_commit, "w") as f: f.write(hook_fail) os.chmod(pre_commit, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC) with self.assertRaises(CommitError): porcelain.commit( self.repo.path, message="Some message", author="Joe ", committer="Bob ", ) sha = porcelain.commit( self.repo.path, message="Some message", author="Joe ", committer="Bob ", no_verify=True, ) self.assertTrue(isinstance(sha, bytes)) self.assertEqual(len(sha), 40) class CleanTests(PorcelainTestCase): def put_files(self, tracked, ignored, untracked, empty_dirs): """Put the described files in the wd""" all_files = tracked | ignored | untracked for file_path in all_files: abs_path = os.path.join(self.repo.path, file_path) # File may need to be written in a dir that doesn't exist yet, so # create the parent dir(s) as necessary parent_dir = os.path.dirname(abs_path) try: os.makedirs(parent_dir) except FileExistsError: pass with open(abs_path, "w") as f: f.write("") with open(os.path.join(self.repo.path, ".gitignore"), "w") as f: f.writelines(ignored) for dir_path in empty_dirs: os.mkdir(os.path.join(self.repo.path, "empty_dir")) files_to_add = [os.path.join(self.repo.path, t) for t in tracked] porcelain.add(repo=self.repo.path, paths=files_to_add) porcelain.commit(repo=self.repo.path, message="init commit") def assert_wd(self, expected_paths): """Assert paths of files and dirs in wd are same as expected_paths""" control_dir_rel = os.path.relpath(self.repo._controldir, self.repo.path) # normalize paths to simplify comparison across platforms found_paths = { os.path.normpath(p) for p in flat_walk_dir(self.repo.path) if not p.split(os.sep)[0] == control_dir_rel } norm_expected_paths = {os.path.normpath(p) for p in expected_paths} self.assertEqual(found_paths, norm_expected_paths) def test_from_root(self): self.put_files( tracked={"tracked_file", "tracked_dir/tracked_file", ".gitignore"}, ignored={"ignored_file"}, untracked={ "untracked_file", "tracked_dir/untracked_dir/untracked_file", "untracked_dir/untracked_dir/untracked_file", }, empty_dirs={"empty_dir"}, ) porcelain.clean(repo=self.repo.path, target_dir=self.repo.path) self.assert_wd( { "tracked_file", "tracked_dir/tracked_file", ".gitignore", "ignored_file", "tracked_dir", } ) def test_from_subdir(self): self.put_files( tracked={"tracked_file", "tracked_dir/tracked_file", ".gitignore"}, ignored={"ignored_file"}, untracked={ "untracked_file", "tracked_dir/untracked_dir/untracked_file", "untracked_dir/untracked_dir/untracked_file", }, empty_dirs={"empty_dir"}, ) porcelain.clean( - repo=self.repo, target_dir=os.path.join(self.repo.path, "untracked_dir") + repo=self.repo, + target_dir=os.path.join(self.repo.path, "untracked_dir"), ) self.assert_wd( { "tracked_file", "tracked_dir/tracked_file", ".gitignore", "ignored_file", "untracked_file", "tracked_dir/untracked_dir/untracked_file", "empty_dir", "untracked_dir", "tracked_dir", "tracked_dir/untracked_dir", } ) class 
CloneTests(PorcelainTestCase): def test_simple_local(self): f1_1 = make_object(Blob, data=b"f1") commit_spec = [[1], [2, 1], [3, 1, 2]] trees = { 1: [(b"f1", f1_1), (b"f2", f1_1)], 2: [(b"f1", f1_1), (b"f2", f1_1)], 3: [(b"f1", f1_1), (b"f2", f1_1)], } c1, c2, c3 = build_commit_graph(self.repo.object_store, commit_spec, trees) self.repo.refs[b"refs/heads/master"] = c3.id self.repo.refs[b"refs/tags/foo"] = c3.id target_path = tempfile.mkdtemp() errstream = BytesIO() self.addCleanup(shutil.rmtree, target_path) r = porcelain.clone( self.repo.path, target_path, checkout=False, errstream=errstream ) self.addCleanup(r.close) self.assertEqual(r.path, target_path) target_repo = Repo(target_path) self.assertEqual(0, len(target_repo.open_index())) self.assertEqual(c3.id, target_repo.refs[b"refs/tags/foo"]) self.assertTrue(b"f1" not in os.listdir(target_path)) self.assertTrue(b"f2" not in os.listdir(target_path)) c = r.get_config() encoded_path = self.repo.path if not isinstance(encoded_path, bytes): encoded_path = encoded_path.encode("utf-8") self.assertEqual(encoded_path, c.get((b"remote", b"origin"), b"url")) self.assertEqual( b"+refs/heads/*:refs/remotes/origin/*", c.get((b"remote", b"origin"), b"fetch"), ) def test_simple_local_with_checkout(self): f1_1 = make_object(Blob, data=b"f1") commit_spec = [[1], [2, 1], [3, 1, 2]] trees = { 1: [(b"f1", f1_1), (b"f2", f1_1)], 2: [(b"f1", f1_1), (b"f2", f1_1)], 3: [(b"f1", f1_1), (b"f2", f1_1)], } c1, c2, c3 = build_commit_graph(self.repo.object_store, commit_spec, trees) self.repo.refs[b"refs/heads/master"] = c3.id target_path = tempfile.mkdtemp() errstream = BytesIO() self.addCleanup(shutil.rmtree, target_path) with porcelain.clone( self.repo.path, target_path, checkout=True, errstream=errstream ) as r: self.assertEqual(r.path, target_path) with Repo(target_path) as r: self.assertEqual(r.head(), c3.id) self.assertTrue("f1" in os.listdir(target_path)) self.assertTrue("f2" in os.listdir(target_path)) def test_bare_local_with_checkout(self): f1_1 = make_object(Blob, data=b"f1") commit_spec = [[1], [2, 1], [3, 1, 2]] trees = { 1: [(b"f1", f1_1), (b"f2", f1_1)], 2: [(b"f1", f1_1), (b"f2", f1_1)], 3: [(b"f1", f1_1), (b"f2", f1_1)], } c1, c2, c3 = build_commit_graph(self.repo.object_store, commit_spec, trees) self.repo.refs[b"refs/heads/master"] = c3.id target_path = tempfile.mkdtemp() errstream = BytesIO() self.addCleanup(shutil.rmtree, target_path) with porcelain.clone( self.repo.path, target_path, bare=True, errstream=errstream ) as r: self.assertEqual(r.path, target_path) with Repo(target_path) as r: r.head() self.assertRaises(NoIndexPresent, r.open_index) self.assertFalse(b"f1" in os.listdir(target_path)) self.assertFalse(b"f2" in os.listdir(target_path)) def test_no_checkout_with_bare(self): f1_1 = make_object(Blob, data=b"f1") commit_spec = [[1]] trees = {1: [(b"f1", f1_1), (b"f2", f1_1)]} (c1,) = build_commit_graph(self.repo.object_store, commit_spec, trees) self.repo.refs[b"refs/heads/master"] = c1.id self.repo.refs[b"HEAD"] = c1.id target_path = tempfile.mkdtemp() errstream = BytesIO() self.addCleanup(shutil.rmtree, target_path) self.assertRaises( porcelain.Error, porcelain.clone, self.repo.path, target_path, checkout=True, bare=True, errstream=errstream, ) def test_no_head_no_checkout(self): f1_1 = make_object(Blob, data=b"f1") commit_spec = [[1]] trees = {1: [(b"f1", f1_1), (b"f2", f1_1)]} (c1,) = build_commit_graph(self.repo.object_store, commit_spec, trees) self.repo.refs[b"refs/heads/master"] = c1.id target_path = tempfile.mkdtemp() 
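# ---------------------------------------------------------------------
# The clone tests above assert that porcelain.clone records both the
# origin URL and the default fetch refspec in the new repository's
# config.  A minimal round-trip sketch of that behaviour; src_path and
# the helper name are hypothetical:
import shutil
import tempfile

from dulwich import porcelain

def origin_config_sketch(src_path):
    # Clone without a checkout, then read back what clone wrote into
    # .git/config; the values mirror the test_simple_local assertions.
    target = tempfile.mkdtemp()
    try:
        with porcelain.clone(src_path, target, checkout=False) as r:
            c = r.get_config()
            return (c.get((b"remote", b"origin"), b"url"),
                    c.get((b"remote", b"origin"), b"fetch"))
    finally:
        shutil.rmtree(target)
# ---------------------------------------------------------------------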
self.addCleanup(shutil.rmtree, target_path) errstream = BytesIO() r = porcelain.clone( self.repo.path, target_path, checkout=True, errstream=errstream ) r.close() def test_no_head_no_checkout_outstream_errstream_autofallback(self): f1_1 = make_object(Blob, data=b"f1") commit_spec = [[1]] trees = {1: [(b"f1", f1_1), (b"f2", f1_1)]} (c1,) = build_commit_graph(self.repo.object_store, commit_spec, trees) self.repo.refs[b"refs/heads/master"] = c1.id target_path = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, target_path) errstream = porcelain.NoneStream() r = porcelain.clone( self.repo.path, target_path, checkout=True, errstream=errstream ) r.close() def test_source_broken(self): target_path = tempfile.mkdtemp() self.assertRaises(Exception, porcelain.clone, "/nonexistant/repo", target_path) self.assertFalse(os.path.exists(target_path)) class InitTests(TestCase): def test_non_bare(self): repo_dir = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, repo_dir) porcelain.init(repo_dir) def test_bare(self): repo_dir = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, repo_dir) porcelain.init(repo_dir, bare=True) class AddTests(PorcelainTestCase): def test_add_default_paths(self): # create a file for initial commit fullpath = os.path.join(self.repo.path, "blah") with open(fullpath, "w") as f: f.write("\n") porcelain.add(repo=self.repo.path, paths=[fullpath]) porcelain.commit( repo=self.repo.path, message=b"test", author=b"test ", committer=b"test ", ) # Add a second test file and a file in a directory with open(os.path.join(self.repo.path, "foo"), "w") as f: f.write("\n") os.mkdir(os.path.join(self.repo.path, "adir")) with open(os.path.join(self.repo.path, "adir", "afile"), "w") as f: f.write("\n") cwd = os.getcwd() try: os.chdir(self.repo.path) self.assertEqual(set(["foo", "blah", "adir", ".git"]), set(os.listdir("."))) self.assertEqual( (["foo", os.path.join("adir", "afile")], set()), porcelain.add(self.repo.path), ) finally: os.chdir(cwd) # Check that foo was added and nothing in .git was modified index = self.repo.open_index() self.assertEqual(sorted(index), [b"adir/afile", b"blah", b"foo"]) def test_add_default_paths_subdir(self): os.mkdir(os.path.join(self.repo.path, "foo")) with open(os.path.join(self.repo.path, "blah"), "w") as f: f.write("\n") with open(os.path.join(self.repo.path, "foo", "blie"), "w") as f: f.write("\n") cwd = os.getcwd() try: os.chdir(os.path.join(self.repo.path, "foo")) porcelain.add(repo=self.repo.path) porcelain.commit( repo=self.repo.path, message=b"test", author=b"test ", committer=b"test ", ) finally: os.chdir(cwd) index = self.repo.open_index() self.assertEqual(sorted(index), [b"foo/blie"]) def test_add_file(self): fullpath = os.path.join(self.repo.path, "foo") with open(fullpath, "w") as f: f.write("BAR") porcelain.add(self.repo.path, paths=[fullpath]) self.assertIn(b"foo", self.repo.open_index()) def test_add_ignored(self): with open(os.path.join(self.repo.path, ".gitignore"), "w") as f: f.write("foo") with open(os.path.join(self.repo.path, "foo"), "w") as f: f.write("BAR") with open(os.path.join(self.repo.path, "bar"), "w") as f: f.write("BAR") (added, ignored) = porcelain.add( self.repo.path, paths=[ os.path.join(self.repo.path, "foo"), os.path.join(self.repo.path, "bar"), ], ) self.assertIn(b"bar", self.repo.open_index()) self.assertEqual(set(["bar"]), set(added)) self.assertEqual(set(["foo"]), ignored) def test_add_file_absolute_path(self): # Absolute paths are (not yet) supported with open(os.path.join(self.repo.path, "foo"), "w") as f: f.write("BAR") 
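# ---------------------------------------------------------------------
# test_add_ignored (below) depends on porcelain.add returning a pair
# rather than raising: paths matched by .gitignore come back in the
# second element instead of being staged.  Condensed sketch, assuming
# repo_path is an initialized work tree whose .gitignore lists "foo":
import os

from dulwich import porcelain

def stage_sketch(repo_path):
    # Write one ignored and one trackable file, then stage both; only
    # "bar" lands in the index, while "foo" is reported as ignored.
    for name in ("foo", "bar"):
        with open(os.path.join(repo_path, name), "w") as f:
            f.write("BAR")
    added, ignored = porcelain.add(
        repo_path,
        paths=[os.path.join(repo_path, n) for n in ("foo", "bar")],
    )
    return added, ignored
# ---------------------------------------------------------------------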
porcelain.add(self.repo, paths=[os.path.join(self.repo.path, "foo")]) self.assertIn(b"foo", self.repo.open_index()) def test_add_not_in_repo(self): with open(os.path.join(self.test_dir, "foo"), "w") as f: f.write("BAR") self.assertRaises( ValueError, porcelain.add, self.repo, paths=[os.path.join(self.test_dir, "foo")], ) self.assertRaises( - (ValueError, FileNotFoundError), porcelain.add, self.repo, paths=["../foo"] + (ValueError, FileNotFoundError), + porcelain.add, + self.repo, + paths=["../foo"], ) self.assertEqual([], list(self.repo.open_index())) def test_add_file_clrf_conversion(self): # Set the right configuration to the repo c = self.repo.get_config() c.set("core", "autocrlf", "input") c.write_to_path() # Add a file with CRLF line-ending fullpath = os.path.join(self.repo.path, "foo") with open(fullpath, "wb") as f: f.write(b"line1\r\nline2") porcelain.add(self.repo.path, paths=[fullpath]) # The line-endings should have been converted to LF index = self.repo.open_index() self.assertIn(b"foo", index) entry = index[b"foo"] blob = self.repo[entry.sha] self.assertEqual(blob.data, b"line1\nline2") class RemoveTests(PorcelainTestCase): def test_remove_file(self): fullpath = os.path.join(self.repo.path, "foo") with open(fullpath, "w") as f: f.write("BAR") porcelain.add(self.repo.path, paths=[fullpath]) porcelain.commit( repo=self.repo, message=b"test", author=b"test ", committer=b"test ", ) self.assertTrue(os.path.exists(os.path.join(self.repo.path, "foo"))) cwd = os.getcwd() try: os.chdir(self.repo.path) porcelain.remove(self.repo.path, paths=["foo"]) finally: os.chdir(cwd) self.assertFalse(os.path.exists(os.path.join(self.repo.path, "foo"))) def test_remove_file_staged(self): fullpath = os.path.join(self.repo.path, "foo") with open(fullpath, "w") as f: f.write("BAR") cwd = os.getcwd() try: os.chdir(self.repo.path) porcelain.add(self.repo.path, paths=[fullpath]) self.assertRaises(Exception, porcelain.rm, self.repo.path, paths=["foo"]) finally: os.chdir(cwd) def test_remove_file_removed_on_disk(self): fullpath = os.path.join(self.repo.path, "foo") with open(fullpath, "w") as f: f.write("BAR") porcelain.add(self.repo.path, paths=[fullpath]) cwd = os.getcwd() try: os.chdir(self.repo.path) os.remove(fullpath) porcelain.remove(self.repo.path, paths=["foo"]) finally: os.chdir(cwd) self.assertFalse(os.path.exists(os.path.join(self.repo.path, "foo"))) class LogTests(PorcelainTestCase): def test_simple(self): c1, c2, c3 = build_commit_graph( self.repo.object_store, [[1], [2, 1], [3, 1, 2]] ) self.repo.refs[b"HEAD"] = c3.id outstream = StringIO() porcelain.log(self.repo.path, outstream=outstream) self.assertEqual(3, outstream.getvalue().count("-" * 50)) def test_max_entries(self): c1, c2, c3 = build_commit_graph( self.repo.object_store, [[1], [2, 1], [3, 1, 2]] ) self.repo.refs[b"HEAD"] = c3.id outstream = StringIO() porcelain.log(self.repo.path, outstream=outstream, max_entries=1) self.assertEqual(1, outstream.getvalue().count("-" * 50)) class ShowTests(PorcelainTestCase): def test_nolist(self): c1, c2, c3 = build_commit_graph( self.repo.object_store, [[1], [2, 1], [3, 1, 2]] ) self.repo.refs[b"HEAD"] = c3.id outstream = StringIO() porcelain.show(self.repo.path, objects=c3.id, outstream=outstream) self.assertTrue(outstream.getvalue().startswith("-" * 50)) def test_simple(self): c1, c2, c3 = build_commit_graph( self.repo.object_store, [[1], [2, 1], [3, 1, 2]] ) self.repo.refs[b"HEAD"] = c3.id outstream = StringIO() porcelain.show(self.repo.path, objects=[c3.id], outstream=outstream) 
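# ---------------------------------------------------------------------
# test_add_file_clrf_conversion (further up) pins down the
# core.autocrlf=input behaviour: CRLF becomes LF in the stored blob at
# add time, and nothing is converted back on checkout.  The
# normalization itself reduces to this sketch (not the code path
# dulwich actually routes through its blob-normalization layer):
def autocrlf_input_sketch(data):
    # core.autocrlf=input: normalize CRLF to LF when staging.
    return data.replace(b"\r\n", b"\n")

assert autocrlf_input_sketch(b"line1\r\nline2") == b"line1\nline2"
# ---------------------------------------------------------------------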
self.assertTrue(outstream.getvalue().startswith("-" * 50)) def test_blob(self): b = Blob.from_string(b"The Foo\n") self.repo.object_store.add_object(b) outstream = StringIO() porcelain.show(self.repo.path, objects=[b.id], outstream=outstream) self.assertEqual(outstream.getvalue(), "The Foo\n") def test_commit_no_parent(self): a = Blob.from_string(b"The Foo\n") ta = Tree() ta.add(b"somename", 0o100644, a.id) ca = make_commit(tree=ta.id) self.repo.object_store.add_objects([(a, None), (ta, None), (ca, None)]) outstream = StringIO() porcelain.show(self.repo.path, objects=[ca.id], outstream=outstream) self.assertMultiLineEqual( outstream.getvalue(), """\ -------------------------------------------------- commit: 344da06c1bb85901270b3e8875c988a027ec087d Author: Test Author Committer: Test Committer Date: Fri Jan 01 2010 00:00:00 +0000 Test message. diff --git a/somename b/somename new file mode 100644 index 0000000..ea5c7bf --- /dev/null +++ b/somename @@ -0,0 +1 @@ +The Foo """, ) def test_tag(self): a = Blob.from_string(b"The Foo\n") ta = Tree() ta.add(b"somename", 0o100644, a.id) ca = make_commit(tree=ta.id) self.repo.object_store.add_objects([(a, None), (ta, None), (ca, None)]) porcelain.tag_create( self.repo.path, b"tryme", b"foo ", b"bar", annotated=True, objectish=ca.id, tag_time=1552854211, tag_timezone=0, ) outstream = StringIO() porcelain.show(self.repo, objects=[b"refs/tags/tryme"], outstream=outstream) self.maxDiff = None self.assertMultiLineEqual( outstream.getvalue(), """\ Tagger: foo Date: Sun Mar 17 2019 20:23:31 +0000 bar -------------------------------------------------- commit: 344da06c1bb85901270b3e8875c988a027ec087d Author: Test Author Committer: Test Committer Date: Fri Jan 01 2010 00:00:00 +0000 Test message. diff --git a/somename b/somename new file mode 100644 index 0000000..ea5c7bf --- /dev/null +++ b/somename @@ -0,0 +1 @@ +The Foo """, ) def test_commit_with_change(self): a = Blob.from_string(b"The Foo\n") ta = Tree() ta.add(b"somename", 0o100644, a.id) ca = make_commit(tree=ta.id) b = Blob.from_string(b"The Bar\n") tb = Tree() tb.add(b"somename", 0o100644, b.id) cb = make_commit(tree=tb.id, parents=[ca.id]) self.repo.object_store.add_objects( - [(a, None), (b, None), (ta, None), (tb, None), (ca, None), (cb, None)] + [ + (a, None), + (b, None), + (ta, None), + (tb, None), + (ca, None), + (cb, None), + ] ) outstream = StringIO() porcelain.show(self.repo.path, objects=[cb.id], outstream=outstream) self.assertMultiLineEqual( outstream.getvalue(), """\ -------------------------------------------------- commit: 2c6b6c9cb72c130956657e1fdae58e5b103744fa Author: Test Author Committer: Test Committer Date: Fri Jan 01 2010 00:00:00 +0000 Test message. 
diff --git a/somename b/somename index ea5c7bf..fd38bcb 100644 --- a/somename +++ b/somename @@ -1 +1 @@ -The Foo +The Bar """, ) class SymbolicRefTests(PorcelainTestCase): def test_set_wrong_symbolic_ref(self): c1, c2, c3 = build_commit_graph( self.repo.object_store, [[1], [2, 1], [3, 1, 2]] ) self.repo.refs[b"HEAD"] = c3.id self.assertRaises( porcelain.Error, porcelain.symbolic_ref, self.repo.path, b"foobar" ) def test_set_force_wrong_symbolic_ref(self): c1, c2, c3 = build_commit_graph( self.repo.object_store, [[1], [2, 1], [3, 1, 2]] ) self.repo.refs[b"HEAD"] = c3.id porcelain.symbolic_ref(self.repo.path, b"force_foobar", force=True) # test if we actually changed the file with self.repo.get_named_file("HEAD") as f: new_ref = f.read() self.assertEqual(new_ref, b"ref: refs/heads/force_foobar\n") def test_set_symbolic_ref(self): c1, c2, c3 = build_commit_graph( self.repo.object_store, [[1], [2, 1], [3, 1, 2]] ) self.repo.refs[b"HEAD"] = c3.id porcelain.symbolic_ref(self.repo.path, b"master") def test_set_symbolic_ref_other_than_master(self): c1, c2, c3 = build_commit_graph( - self.repo.object_store, [[1], [2, 1], [3, 1, 2]], attrs=dict(refs="develop") + self.repo.object_store, + [[1], [2, 1], [3, 1, 2]], + attrs=dict(refs="develop"), ) self.repo.refs[b"HEAD"] = c3.id self.repo.refs[b"refs/heads/develop"] = c3.id porcelain.symbolic_ref(self.repo.path, b"develop") # test if we actually changed the file with self.repo.get_named_file("HEAD") as f: new_ref = f.read() self.assertEqual(new_ref, b"ref: refs/heads/develop\n") class DiffTreeTests(PorcelainTestCase): def test_empty(self): c1, c2, c3 = build_commit_graph( self.repo.object_store, [[1], [2, 1], [3, 1, 2]] ) self.repo.refs[b"HEAD"] = c3.id outstream = BytesIO() porcelain.diff_tree(self.repo.path, c2.tree, c3.tree, outstream=outstream) self.assertEqual(outstream.getvalue(), b"") class CommitTreeTests(PorcelainTestCase): def test_simple(self): c1, c2, c3 = build_commit_graph( self.repo.object_store, [[1], [2, 1], [3, 1, 2]] ) b = Blob() b.data = b"foo the bar" t = Tree() t.add(b"somename", 0o100644, b.id) self.repo.object_store.add_object(t) self.repo.object_store.add_object(b) sha = porcelain.commit_tree( self.repo.path, t.id, message=b"Withcommit.", author=b"Joe ", committer=b"Jane ", ) self.assertTrue(isinstance(sha, bytes)) self.assertEqual(len(sha), 40) class RevListTests(PorcelainTestCase): def test_simple(self): c1, c2, c3 = build_commit_graph( self.repo.object_store, [[1], [2, 1], [3, 1, 2]] ) outstream = BytesIO() porcelain.rev_list(self.repo.path, [c3.id], outstream=outstream) self.assertEqual( c3.id + b"\n" + c2.id + b"\n" + c1.id + b"\n", outstream.getvalue() ) class TagCreateTests(PorcelainTestCase): def test_annotated(self): c1, c2, c3 = build_commit_graph( self.repo.object_store, [[1], [2, 1], [3, 1, 2]] ) self.repo.refs[b"HEAD"] = c3.id porcelain.tag_create( - self.repo.path, b"tryme", b"foo ", b"bar", annotated=True + self.repo.path, + b"tryme", + b"foo ", + b"bar", + annotated=True, ) tags = self.repo.refs.as_dict(b"refs/tags") self.assertEqual(list(tags.keys()), [b"tryme"]) tag = self.repo[b"refs/tags/tryme"] self.assertTrue(isinstance(tag, Tag)) self.assertEqual(b"foo ", tag.tagger) self.assertEqual(b"bar", tag.message) self.assertLess(time.time() - tag.tag_time, 5) def test_unannotated(self): c1, c2, c3 = build_commit_graph( self.repo.object_store, [[1], [2, 1], [3, 1, 2]] ) self.repo.refs[b"HEAD"] = c3.id porcelain.tag_create(self.repo.path, b"tryme", annotated=False) tags = self.repo.refs.as_dict(b"refs/tags") 
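# ---------------------------------------------------------------------
# The tag tests around here hinge on what the ref ends up storing: an
# annotated tag creates a Tag object that points at the commit, while a
# lightweight tag makes refs/tags/<name> hold the commit id directly.
# Sketch, assuming repo is an open Repo with at least one commit; the
# tagger identity is a made-up placeholder:
from dulwich import porcelain
from dulwich.objects import Tag

def tag_kinds_sketch(repo):
    porcelain.tag_create(repo, b"ann", b"foo <foo@example.com>", b"msg",
                         annotated=True)
    porcelain.tag_create(repo, b"light", annotated=False)
    assert isinstance(repo[b"refs/tags/ann"], Tag)     # tag object
    assert repo[b"refs/tags/light"].id == repo.head()  # plain commit ref
# ---------------------------------------------------------------------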
self.assertEqual(list(tags.keys()), [b"tryme"]) self.repo[b"refs/tags/tryme"] self.assertEqual(list(tags.values()), [self.repo.head()]) def test_unannotated_unicode(self): c1, c2, c3 = build_commit_graph( self.repo.object_store, [[1], [2, 1], [3, 1, 2]] ) self.repo.refs[b"HEAD"] = c3.id porcelain.tag_create(self.repo.path, "tryme", annotated=False) tags = self.repo.refs.as_dict(b"refs/tags") self.assertEqual(list(tags.keys()), [b"tryme"]) self.repo[b"refs/tags/tryme"] self.assertEqual(list(tags.values()), [self.repo.head()]) class TagListTests(PorcelainTestCase): def test_empty(self): tags = porcelain.tag_list(self.repo.path) self.assertEqual([], tags) def test_simple(self): self.repo.refs[b"refs/tags/foo"] = b"aa" * 20 self.repo.refs[b"refs/tags/bar/bla"] = b"bb" * 20 tags = porcelain.tag_list(self.repo.path) self.assertEqual([b"bar/bla", b"foo"], tags) class TagDeleteTests(PorcelainTestCase): def test_simple(self): [c1] = build_commit_graph(self.repo.object_store, [[1]]) self.repo[b"HEAD"] = c1.id porcelain.tag_create(self.repo, b"foo") self.assertTrue(b"foo" in porcelain.tag_list(self.repo)) porcelain.tag_delete(self.repo, b"foo") self.assertFalse(b"foo" in porcelain.tag_list(self.repo)) class ResetTests(PorcelainTestCase): def test_hard_head(self): fullpath = os.path.join(self.repo.path, "foo") with open(fullpath, "w") as f: f.write("BAR") porcelain.add(self.repo.path, paths=[fullpath]) porcelain.commit( self.repo.path, message=b"Some message", committer=b"Jane ", author=b"John ", ) with open(os.path.join(self.repo.path, "foo"), "wb") as f: f.write(b"OOH") porcelain.reset(self.repo, "hard", b"HEAD") index = self.repo.open_index() changes = list( tree_changes( - self.repo, index.commit(self.repo.object_store), self.repo[b"HEAD"].tree + self.repo, + index.commit(self.repo.object_store), + self.repo[b"HEAD"].tree, ) ) self.assertEqual([], changes) def test_hard_commit(self): fullpath = os.path.join(self.repo.path, "foo") with open(fullpath, "w") as f: f.write("BAR") porcelain.add(self.repo.path, paths=[fullpath]) sha = porcelain.commit( self.repo.path, message=b"Some message", committer=b"Jane ", author=b"John ", ) with open(fullpath, "wb") as f: f.write(b"BAZ") porcelain.add(self.repo.path, paths=[fullpath]) porcelain.commit( self.repo.path, message=b"Some other message", committer=b"Jane ", author=b"John ", ) porcelain.reset(self.repo, "hard", sha) index = self.repo.open_index() changes = list( tree_changes( - self.repo, index.commit(self.repo.object_store), self.repo[sha].tree + self.repo, + index.commit(self.repo.object_store), + self.repo[sha].tree, ) ) self.assertEqual([], changes) class PushTests(PorcelainTestCase): def test_simple(self): """ Basic test of porcelain push where self.repo is the remote. First clone the remote, commit a file to the clone, then push the changes back to the remote. 
""" outstream = BytesIO() errstream = BytesIO() porcelain.commit( repo=self.repo.path, message=b"init", author=b"author ", committer=b"committer ", ) # Setup target repo cloned from temp test repo clone_path = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, clone_path) target_repo = porcelain.clone( self.repo.path, target=clone_path, errstream=errstream ) try: self.assertEqual(target_repo[b"HEAD"], self.repo[b"HEAD"]) finally: target_repo.close() # create a second file to be pushed back to origin handle, fullpath = tempfile.mkstemp(dir=clone_path) os.close(handle) porcelain.add(repo=clone_path, paths=[fullpath]) porcelain.commit( repo=clone_path, message=b"push", author=b"author ", committer=b"committer ", ) # Setup a non-checked out branch in the remote refs_path = b"refs/heads/foo" new_id = self.repo[b"HEAD"].id self.assertNotEqual(new_id, ZERO_SHA) self.repo.refs[refs_path] = new_id # Push to the remote porcelain.push( clone_path, "origin", b"HEAD:" + refs_path, outstream=outstream, errstream=errstream, ) self.assertEqual( - target_repo.refs[b"refs/remotes/origin/foo"], target_repo.refs[b"HEAD"] + target_repo.refs[b"refs/remotes/origin/foo"], + target_repo.refs[b"HEAD"], ) # Check that the target and source with Repo(clone_path) as r_clone: self.assertEqual( { b"HEAD": new_id, b"refs/heads/foo": r_clone[b"HEAD"].id, b"refs/heads/master": new_id, }, self.repo.get_refs(), ) self.assertEqual(r_clone[b"HEAD"].id, self.repo[refs_path].id) # Get the change in the target repo corresponding to the add # this will be in the foo branch. change = list( tree_changes( self.repo, self.repo[b"HEAD"].tree, self.repo[b"refs/heads/foo"].tree, ) )[0] self.assertEqual( os.path.basename(fullpath), change.new.path.decode("ascii") ) def test_local_missing(self): """Pushing a new branch.""" outstream = BytesIO() errstream = BytesIO() # Setup target repo cloned from temp test repo clone_path = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, clone_path) target_repo = porcelain.init(clone_path) target_repo.close() self.assertRaises( porcelain.Error, porcelain.push, self.repo, clone_path, b"HEAD:refs/heads/master", outstream=outstream, errstream=errstream, ) def test_new(self): """Pushing a new branch.""" outstream = BytesIO() errstream = BytesIO() # Setup target repo cloned from temp test repo clone_path = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, clone_path) target_repo = porcelain.init(clone_path) target_repo.close() # create a second file to be pushed back to origin handle, fullpath = tempfile.mkstemp(dir=clone_path) os.close(handle) porcelain.add(repo=clone_path, paths=[fullpath]) new_id = porcelain.commit( repo=self.repo, message=b"push", author=b"author ", committer=b"committer ", ) # Push to the remote porcelain.push( self.repo, clone_path, b"HEAD:refs/heads/master", outstream=outstream, errstream=errstream, ) with Repo(clone_path) as r_clone: self.assertEqual( { b"HEAD": new_id, b"refs/heads/master": new_id, }, r_clone.get_refs(), ) def test_delete(self): """Basic test of porcelain push, removing a branch.""" outstream = BytesIO() errstream = BytesIO() porcelain.commit( repo=self.repo.path, message=b"init", author=b"author ", committer=b"committer ", ) # Setup target repo cloned from temp test repo clone_path = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, clone_path) target_repo = porcelain.clone( self.repo.path, target=clone_path, errstream=errstream ) target_repo.close() # Setup a non-checked out branch in the remote refs_path = b"refs/heads/foo" new_id = self.repo[b"HEAD"].id 
self.assertNotEqual(new_id, ZERO_SHA) self.repo.refs[refs_path] = new_id # Push to the remote porcelain.push( clone_path, self.repo.path, b":" + refs_path, outstream=outstream, errstream=errstream, ) self.assertEqual( { b"HEAD": new_id, b"refs/heads/master": new_id, }, self.repo.get_refs(), ) def test_diverged(self): outstream = BytesIO() errstream = BytesIO() porcelain.commit( repo=self.repo.path, message=b"init", author=b"author ", committer=b"committer ", ) # Setup target repo cloned from temp test repo clone_path = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, clone_path) target_repo = porcelain.clone( self.repo.path, target=clone_path, errstream=errstream ) target_repo.close() remote_id = porcelain.commit( repo=self.repo.path, message=b"remote change", author=b"author ", committer=b"committer ", ) local_id = porcelain.commit( repo=clone_path, message=b"local change", author=b"author ", committer=b"committer ", ) outstream = BytesIO() errstream = BytesIO() # Push to the remote self.assertRaises( porcelain.DivergedBranches, porcelain.push, clone_path, self.repo.path, b"refs/heads/master", outstream=outstream, errstream=errstream, ) self.assertEqual( { b"HEAD": remote_id, b"refs/heads/master": remote_id, }, self.repo.get_refs(), ) self.assertEqual(b"", outstream.getvalue()) self.assertEqual(b"", errstream.getvalue()) outstream = BytesIO() errstream = BytesIO() # Push to the remote with --force porcelain.push( clone_path, self.repo.path, b"refs/heads/master", outstream=outstream, errstream=errstream, force=True, ) self.assertEqual( { b"HEAD": local_id, b"refs/heads/master": local_id, }, self.repo.get_refs(), ) self.assertEqual(b"", outstream.getvalue()) self.assertTrue(re.match(b"Push to .* successful.\n", errstream.getvalue())) class PullTests(PorcelainTestCase): def setUp(self): super(PullTests, self).setUp() # create a file for initial commit handle, fullpath = tempfile.mkstemp(dir=self.repo.path) os.close(handle) porcelain.add(repo=self.repo.path, paths=fullpath) porcelain.commit( repo=self.repo.path, message=b"test", author=b"test ", committer=b"test ", ) # Setup target repo self.target_path = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, self.target_path) target_repo = porcelain.clone( self.repo.path, target=self.target_path, errstream=BytesIO() ) target_repo.close() # create a second file to be pushed handle, fullpath = tempfile.mkstemp(dir=self.repo.path) os.close(handle) porcelain.add(repo=self.repo.path, paths=fullpath) porcelain.commit( repo=self.repo.path, message=b"test2", author=b"test2 ", committer=b"test2 ", ) self.assertIn(b"refs/heads/master", self.repo.refs) self.assertIn(b"refs/heads/master", target_repo.refs) def test_simple(self): outstream = BytesIO() errstream = BytesIO() # Pull changes into the cloned repo porcelain.pull( self.target_path, self.repo.path, b"refs/heads/master", outstream=outstream, errstream=errstream, ) # Check the target repo for pushed changes with Repo(self.target_path) as r: self.assertEqual(r[b"HEAD"].id, self.repo[b"HEAD"].id) def test_diverged(self): outstream = BytesIO() errstream = BytesIO() c3a = porcelain.commit( repo=self.target_path, message=b"test3a", author=b"test2 ", committer=b"test2 ", ) porcelain.commit( repo=self.repo.path, message=b"test3b", author=b"test2 ", committer=b"test2 ", ) # Pull changes into the cloned repo self.assertRaises( porcelain.DivergedBranches, porcelain.pull, self.target_path, self.repo.path, b"refs/heads/master", outstream=outstream, errstream=errstream, ) # Check the target repo for pushed changes 
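# ---------------------------------------------------------------------
# Divergence handling shows up twice in the pull tests below:
# porcelain.pull raises porcelain.DivergedBranches when the local
# branch cannot be fast-forwarded, and fast_forward=False is currently
# rejected with NotImplementedError.  Typical caller-side handling as a
# sketch (target_path and origin_path are hypothetical):
from dulwich import porcelain

def pull_sketch(target_path, origin_path):
    try:
        porcelain.pull(target_path, origin_path, b"refs/heads/master")
    except porcelain.DivergedBranches:
        # Histories diverged; a real caller would merge or rebase here.
        pass
# ---------------------------------------------------------------------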
with Repo(self.target_path) as r: self.assertEqual(r[b"refs/heads/master"].id, c3a) self.assertRaises( NotImplementedError, porcelain.pull, self.target_path, self.repo.path, b"refs/heads/master", outstream=outstream, errstream=errstream, fast_forward=False, ) # Check the target repo for pushed changes with Repo(self.target_path) as r: self.assertEqual(r[b"refs/heads/master"].id, c3a) def test_no_refspec(self): outstream = BytesIO() errstream = BytesIO() # Pull changes into the cloned repo porcelain.pull( - self.target_path, self.repo.path, outstream=outstream, errstream=errstream + self.target_path, + self.repo.path, + outstream=outstream, + errstream=errstream, ) # Check the target repo for pushed changes with Repo(self.target_path) as r: self.assertEqual(r[b"HEAD"].id, self.repo[b"HEAD"].id) def test_no_remote_location(self): outstream = BytesIO() errstream = BytesIO() # Pull changes into the cloned repo porcelain.pull( self.target_path, refspecs=b"refs/heads/master", outstream=outstream, errstream=errstream, ) # Check the target repo for pushed changes with Repo(self.target_path) as r: self.assertEqual(r[b"HEAD"].id, self.repo[b"HEAD"].id) class StatusTests(PorcelainTestCase): def test_empty(self): results = porcelain.status(self.repo) self.assertEqual({"add": [], "delete": [], "modify": []}, results.staged) self.assertEqual([], results.unstaged) def test_status_base(self): """Integration test for `status` functionality.""" # Commit a dummy file then modify it fullpath = os.path.join(self.repo.path, "foo") with open(fullpath, "w") as f: f.write("origstuff") porcelain.add(repo=self.repo.path, paths=[fullpath]) porcelain.commit( repo=self.repo.path, message=b"test status", author=b"author ", committer=b"committer ", ) # modify access and modify time of path os.utime(fullpath, (0, 0)) with open(fullpath, "wb") as f: f.write(b"stuff") # Make a dummy file and stage it filename_add = "bar" fullpath = os.path.join(self.repo.path, filename_add) with open(fullpath, "w") as f: f.write("stuff") porcelain.add(repo=self.repo.path, paths=fullpath) results = porcelain.status(self.repo) self.assertEqual(results.staged["add"][0], filename_add.encode("ascii")) self.assertEqual(results.unstaged, [b"foo"]) def test_status_all(self): del_path = os.path.join(self.repo.path, "foo") mod_path = os.path.join(self.repo.path, "bar") add_path = os.path.join(self.repo.path, "baz") us_path = os.path.join(self.repo.path, "blye") ut_path = os.path.join(self.repo.path, "blyat") with open(del_path, "w") as f: f.write("origstuff") with open(mod_path, "w") as f: f.write("origstuff") with open(us_path, "w") as f: f.write("origstuff") porcelain.add(repo=self.repo.path, paths=[del_path, mod_path, us_path]) porcelain.commit( repo=self.repo.path, message=b"test status", author=b"author ", committer=b"committer ", ) porcelain.remove(self.repo.path, [del_path]) with open(add_path, "w") as f: f.write("origstuff") with open(mod_path, "w") as f: f.write("more_origstuff") with open(us_path, "w") as f: f.write("more_origstuff") porcelain.add(repo=self.repo.path, paths=[add_path, mod_path]) with open(us_path, "w") as f: f.write("\norigstuff") with open(ut_path, "w") as f: f.write("origstuff") results = porcelain.status(self.repo.path) self.assertDictEqual( - {"add": [b"baz"], "delete": [b"foo"], "modify": [b"bar"]}, results.staged + {"add": [b"baz"], "delete": [b"foo"], "modify": [b"bar"]}, + results.staged, ) self.assertListEqual(results.unstaged, [b"blye"]) self.assertListEqual(results.untracked, ["blyat"]) def 
test_status_crlf_mismatch(self): # First make a commit as if the file has been added on a Linux system # or with core.autocrlf=True file_path = os.path.join(self.repo.path, "crlf") with open(file_path, "wb") as f: f.write(b"line1\nline2") porcelain.add(repo=self.repo.path, paths=[file_path]) porcelain.commit( repo=self.repo.path, message=b"test status", author=b"author ", committer=b"committer ", ) # Then update the file as if it was created by CGit on a Windows # system with core.autocrlf=true with open(file_path, "wb") as f: f.write(b"line1\r\nline2") results = porcelain.status(self.repo) self.assertDictEqual({"add": [], "delete": [], "modify": []}, results.staged) self.assertListEqual(results.unstaged, [b"crlf"]) self.assertListEqual(results.untracked, []) def test_status_crlf_convert(self): # First make a commit as if the file has been added on a Linux system # or with core.autocrlf=True file_path = os.path.join(self.repo.path, "crlf") with open(file_path, "wb") as f: f.write(b"line1\nline2") porcelain.add(repo=self.repo.path, paths=[file_path]) porcelain.commit( repo=self.repo.path, message=b"test status", author=b"author ", committer=b"committer ", ) # Then update the file as if it was created by CGit on a Windows # system with core.autocrlf=true with open(file_path, "wb") as f: f.write(b"line1\r\nline2") # TODO: It should be set automatically by looking at the configuration c = self.repo.get_config() c.set("core", "autocrlf", True) c.write_to_path() results = porcelain.status(self.repo) self.assertDictEqual({"add": [], "delete": [], "modify": []}, results.staged) self.assertListEqual(results.unstaged, []) self.assertListEqual(results.untracked, []) def test_get_tree_changes_add(self): """Unit test for get_tree_changes add.""" # Make a dummy file, stage filename = "bar" fullpath = os.path.join(self.repo.path, filename) with open(fullpath, "w") as f: f.write("stuff") porcelain.add(repo=self.repo.path, paths=fullpath) porcelain.commit( repo=self.repo.path, message=b"test status", author=b"author ", committer=b"committer ", ) filename = "foo" fullpath = os.path.join(self.repo.path, filename) with open(fullpath, "w") as f: f.write("stuff") porcelain.add(repo=self.repo.path, paths=fullpath) changes = porcelain.get_tree_changes(self.repo.path) self.assertEqual(changes["add"][0], filename.encode("ascii")) self.assertEqual(len(changes["add"]), 1) self.assertEqual(len(changes["modify"]), 0) self.assertEqual(len(changes["delete"]), 0) def test_get_tree_changes_modify(self): """Unit test for get_tree_changes modify.""" # Make a dummy file, stage, commit, modify filename = "foo" fullpath = os.path.join(self.repo.path, filename) with open(fullpath, "w") as f: f.write("stuff") porcelain.add(repo=self.repo.path, paths=fullpath) porcelain.commit( repo=self.repo.path, message=b"test status", author=b"author ", committer=b"committer ", ) with open(fullpath, "w") as f: f.write("otherstuff") porcelain.add(repo=self.repo.path, paths=fullpath) changes = porcelain.get_tree_changes(self.repo.path) self.assertEqual(changes["modify"][0], filename.encode("ascii")) self.assertEqual(len(changes["add"]), 0) self.assertEqual(len(changes["modify"]), 1) self.assertEqual(len(changes["delete"]), 0) def test_get_tree_changes_delete(self): """Unit test for get_tree_changes delete.""" # Make a dummy file, stage, commit, remove filename = "foo" fullpath = os.path.join(self.repo.path, filename) with open(fullpath, "w") as f: f.write("stuff") porcelain.add(repo=self.repo.path, paths=fullpath) porcelain.commit( 
repo=self.repo.path, message=b"test status", author=b"author ", committer=b"committer ", ) cwd = os.getcwd() try: os.chdir(self.repo.path) porcelain.remove(repo=self.repo.path, paths=[filename]) finally: os.chdir(cwd) changes = porcelain.get_tree_changes(self.repo.path) self.assertEqual(changes["delete"][0], filename.encode("ascii")) self.assertEqual(len(changes["add"]), 0) self.assertEqual(len(changes["modify"]), 0) self.assertEqual(len(changes["delete"]), 1) def test_get_untracked_paths(self): with open(os.path.join(self.repo.path, ".gitignore"), "w") as f: f.write("ignored\n") with open(os.path.join(self.repo.path, "ignored"), "w") as f: f.write("blah\n") with open(os.path.join(self.repo.path, "notignored"), "w") as f: f.write("blah\n") self.assertEqual( set(["ignored", "notignored", ".gitignore"]), set( porcelain.get_untracked_paths( self.repo.path, self.repo.path, self.repo.open_index() ) ), ) self.assertEqual( set([".gitignore", "notignored"]), set(porcelain.status(self.repo).untracked), ) self.assertEqual( set([".gitignore", "notignored", "ignored"]), set(porcelain.status(self.repo, ignored=True).untracked), ) def test_get_untracked_paths_nested(self): with open(os.path.join(self.repo.path, ".gitignore"), "w") as f: f.write("nested/\n") with open(os.path.join(self.repo.path, "notignored"), "w") as f: f.write("blah\n") subrepo = Repo.init(os.path.join(self.repo.path, "nested"), mkdir=True) with open(os.path.join(subrepo.path, "ignored"), "w") as f: f.write("bleep\n") with open(os.path.join(subrepo.path, "with"), "w") as f: f.write("bloop\n") with open(os.path.join(subrepo.path, "manager"), "w") as f: f.write("blop\n") self.assertEqual( set([".gitignore", "notignored"]), set( porcelain.get_untracked_paths( self.repo.path, self.repo.path, self.repo.open_index() ) ), ) self.assertEqual( set(["ignored", "with", "manager"]), set( porcelain.get_untracked_paths( subrepo.path, subrepo.path, subrepo.open_index() ) ), ) self.assertEqual( set( [ os.path.join("nested", "ignored"), os.path.join("nested", "with"), os.path.join("nested", "manager"), ] ), set( porcelain.get_untracked_paths( self.repo.path, subrepo.path, self.repo.open_index(), exclude_ignored=False, ) ), ) self.assertEqual( set([]), set( porcelain.get_untracked_paths( self.repo.path, subrepo.path, self.repo.open_index(), exclude_ignored=True, ) ), ) # TODO(jelmer): Add test for dulwich.porcelain.daemon class UploadPackTests(PorcelainTestCase): """Tests for upload_pack.""" def test_upload_pack(self): outf = BytesIO() exitcode = porcelain.upload_pack(self.repo.path, BytesIO(b"0000"), outf) outlines = outf.getvalue().splitlines() self.assertEqual([b"0000"], outlines) self.assertEqual(0, exitcode) class ReceivePackTests(PorcelainTestCase): """Tests for receive_pack.""" def test_receive_pack(self): filename = "foo" fullpath = os.path.join(self.repo.path, filename) with open(fullpath, "w") as f: f.write("stuff") porcelain.add(repo=self.repo.path, paths=fullpath) self.repo.do_commit( message=b"test status", author=b"author ", committer=b"committer ", author_timestamp=1402354300, commit_timestamp=1402354300, author_timezone=0, commit_timezone=0, ) outf = BytesIO() exitcode = porcelain.receive_pack(self.repo.path, BytesIO(b"0000"), outf) outlines = outf.getvalue().splitlines() self.assertEqual( [ b"0091319b56ce3aee2d489f759736a79cc552c9bb86d9 HEAD\x00 report-status " # noqa: E501 b"delete-refs quiet ofs-delta side-band-64k " b"no-done symref=HEAD:refs/heads/master", b"003f319b56ce3aee2d489f759736a79cc552c9bb86d9 refs/heads/master", b"0000", 
], outlines, ) self.assertEqual(0, exitcode) class BranchListTests(PorcelainTestCase): def test_standard(self): self.assertEqual(set([]), set(porcelain.branch_list(self.repo))) def test_new_branch(self): [c1] = build_commit_graph(self.repo.object_store, [[1]]) self.repo[b"HEAD"] = c1.id porcelain.branch_create(self.repo, b"foo") self.assertEqual( set([b"master", b"foo"]), set(porcelain.branch_list(self.repo)) ) class BranchCreateTests(PorcelainTestCase): def test_branch_exists(self): [c1] = build_commit_graph(self.repo.object_store, [[1]]) self.repo[b"HEAD"] = c1.id porcelain.branch_create(self.repo, b"foo") self.assertRaises(porcelain.Error, porcelain.branch_create, self.repo, b"foo") porcelain.branch_create(self.repo, b"foo", force=True) def test_new_branch(self): [c1] = build_commit_graph(self.repo.object_store, [[1]]) self.repo[b"HEAD"] = c1.id porcelain.branch_create(self.repo, b"foo") self.assertEqual( set([b"master", b"foo"]), set(porcelain.branch_list(self.repo)) ) class BranchDeleteTests(PorcelainTestCase): def test_simple(self): [c1] = build_commit_graph(self.repo.object_store, [[1]]) self.repo[b"HEAD"] = c1.id porcelain.branch_create(self.repo, b"foo") self.assertTrue(b"foo" in porcelain.branch_list(self.repo)) porcelain.branch_delete(self.repo, b"foo") self.assertFalse(b"foo" in porcelain.branch_list(self.repo)) def test_simple_unicode(self): [c1] = build_commit_graph(self.repo.object_store, [[1]]) self.repo[b"HEAD"] = c1.id porcelain.branch_create(self.repo, "foo") self.assertTrue(b"foo" in porcelain.branch_list(self.repo)) porcelain.branch_delete(self.repo, "foo") self.assertFalse(b"foo" in porcelain.branch_list(self.repo)) class FetchTests(PorcelainTestCase): def test_simple(self): outstream = BytesIO() errstream = BytesIO() # create a file for initial commit handle, fullpath = tempfile.mkstemp(dir=self.repo.path) os.close(handle) porcelain.add(repo=self.repo.path, paths=fullpath) porcelain.commit( repo=self.repo.path, message=b"test", author=b"test ", committer=b"test ", ) # Setup target repo target_path = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, target_path) target_repo = porcelain.clone( self.repo.path, target=target_path, errstream=errstream ) # create a second file to be pushed handle, fullpath = tempfile.mkstemp(dir=self.repo.path) os.close(handle) porcelain.add(repo=self.repo.path, paths=fullpath) porcelain.commit( repo=self.repo.path, message=b"test2", author=b"test2 ", committer=b"test2 ", ) self.assertFalse(self.repo[b"HEAD"].id in target_repo) target_repo.close() # Fetch changes into the cloned repo porcelain.fetch(target_path, "origin", outstream=outstream, errstream=errstream) # Assert that fetch updated the local image of the remote self.assert_correct_remote_refs(target_repo.get_refs(), self.repo.get_refs()) # Check the target repo for pushed changes with Repo(target_path) as r: self.assertTrue(self.repo[b"HEAD"].id in r) def test_with_remote_name(self): remote_name = "origin" outstream = BytesIO() errstream = BytesIO() # create a file for initial commit handle, fullpath = tempfile.mkstemp(dir=self.repo.path) os.close(handle) porcelain.add(repo=self.repo.path, paths=fullpath) porcelain.commit( repo=self.repo.path, message=b"test", author=b"test ", committer=b"test ", ) # Setup target repo target_path = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, target_path) target_repo = porcelain.clone( self.repo.path, target=target_path, errstream=errstream ) # Capture current refs target_refs = target_repo.get_refs() # create a second file to be pushed 
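# (until fetch runs, this commit exists only in the origin repo; fetch
# updates refs/remotes/<remote>/* in the clone without touching local
# branches or the working tree)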
handle, fullpath = tempfile.mkstemp(dir=self.repo.path) os.close(handle) porcelain.add(repo=self.repo.path, paths=fullpath) porcelain.commit( repo=self.repo.path, message=b"test2", author=b"test2 ", committer=b"test2 ", ) self.assertFalse(self.repo[b"HEAD"].id in target_repo) target_config = target_repo.get_config() target_config.set( (b"remote", remote_name.encode()), b"url", self.repo.path.encode() ) target_repo.close() # Fetch changes into the cloned repo porcelain.fetch( target_path, remote_name, outstream=outstream, errstream=errstream ) # Assert that fetch updated the local image of the remote self.assert_correct_remote_refs(target_repo.get_refs(), self.repo.get_refs()) # Check the target repo for pushed changes, as well as updates # for the refs with Repo(target_path) as r: self.assertTrue(self.repo[b"HEAD"].id in r) self.assertNotEqual(self.repo.get_refs(), target_refs) def assert_correct_remote_refs( self, local_refs, remote_refs, remote_name=b"origin" ): """Assert that known remote refs correspond to actual remote refs.""" local_ref_prefix = b"refs/heads" remote_ref_prefix = b"refs/remotes/" + remote_name locally_known_remote_refs = { k[len(remote_ref_prefix) + 1 :]: v for k, v in local_refs.items() if k.startswith(remote_ref_prefix) } normalized_remote_refs = { k[len(local_ref_prefix) + 1 :]: v for k, v in remote_refs.items() if k.startswith(local_ref_prefix) } self.assertEqual(locally_known_remote_refs, normalized_remote_refs) class RepackTests(PorcelainTestCase): def test_empty(self): porcelain.repack(self.repo) def test_simple(self): handle, fullpath = tempfile.mkstemp(dir=self.repo.path) os.close(handle) porcelain.add(repo=self.repo.path, paths=fullpath) porcelain.repack(self.repo) class LsTreeTests(PorcelainTestCase): def test_empty(self): porcelain.commit( repo=self.repo.path, message=b"test status", author=b"author ", committer=b"committer ", ) f = StringIO() porcelain.ls_tree(self.repo, b"HEAD", outstream=f) self.assertEqual(f.getvalue(), "") def test_simple(self): # Commit a dummy file then modify it fullpath = os.path.join(self.repo.path, "foo") with open(fullpath, "w") as f: f.write("origstuff") porcelain.add(repo=self.repo.path, paths=[fullpath]) porcelain.commit( repo=self.repo.path, message=b"test status", author=b"author ", committer=b"committer ", ) f = StringIO() porcelain.ls_tree(self.repo, b"HEAD", outstream=f) self.assertEqual( - f.getvalue(), "100644 blob 8b82634d7eae019850bb883f06abf428c58bc9aa\tfoo\n" + f.getvalue(), + "100644 blob 8b82634d7eae019850bb883f06abf428c58bc9aa\tfoo\n", ) def test_recursive(self): # Create a directory then write a dummy file in it dirpath = os.path.join(self.repo.path, "adir") filepath = os.path.join(dirpath, "afile") os.mkdir(dirpath) with open(filepath, "w") as f: f.write("origstuff") porcelain.add(repo=self.repo.path, paths=[filepath]) porcelain.commit( repo=self.repo.path, message=b"test status", author=b"author ", committer=b"committer ", ) f = StringIO() porcelain.ls_tree(self.repo, b"HEAD", outstream=f) self.assertEqual( - f.getvalue(), "40000 tree b145cc69a5e17693e24d8a7be0016ed8075de66d\tadir\n" + f.getvalue(), + "40000 tree b145cc69a5e17693e24d8a7be0016ed8075de66d\tadir\n", ) f = StringIO() porcelain.ls_tree(self.repo, b"HEAD", outstream=f, recursive=True) self.assertEqual( f.getvalue(), "40000 tree b145cc69a5e17693e24d8a7be0016ed8075de66d\tadir\n" "100644 blob 8b82634d7eae019850bb883f06abf428c58bc9aa\tadir" "/afile\n", ) class LsRemoteTests(PorcelainTestCase): def test_empty(self): self.assertEqual({},
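# ls_remote lists the refs a repository advertises, like `git ls-remote`;
# a freshly initialised repo has no commits and hence no refs, so the
# advertisement is an empty dict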
porcelain.ls_remote(self.repo.path)) def test_some(self): cid = porcelain.commit( repo=self.repo.path, message=b"test status", author=b"author ", committer=b"committer ", ) self.assertEqual( {b"refs/heads/master": cid, b"HEAD": cid}, porcelain.ls_remote(self.repo.path), ) class LsFilesTests(PorcelainTestCase): def test_empty(self): self.assertEqual([], list(porcelain.ls_files(self.repo))) def test_simple(self): # Commit a dummy file then modify it fullpath = os.path.join(self.repo.path, "foo") with open(fullpath, "w") as f: f.write("origstuff") porcelain.add(repo=self.repo.path, paths=[fullpath]) self.assertEqual([b"foo"], list(porcelain.ls_files(self.repo))) class RemoteAddTests(PorcelainTestCase): def test_new(self): porcelain.remote_add(self.repo, "jelmer", "git://jelmer.uk/code/dulwich") c = self.repo.get_config() self.assertEqual( - c.get((b"remote", b"jelmer"), b"url"), b"git://jelmer.uk/code/dulwich" + c.get((b"remote", b"jelmer"), b"url"), + b"git://jelmer.uk/code/dulwich", ) def test_exists(self): porcelain.remote_add(self.repo, "jelmer", "git://jelmer.uk/code/dulwich") self.assertRaises( porcelain.RemoteExists, porcelain.remote_add, self.repo, "jelmer", "git://jelmer.uk/code/dulwich", ) class CheckIgnoreTests(PorcelainTestCase): def test_check_ignored(self): with open(os.path.join(self.repo.path, ".gitignore"), "w") as f: f.write("foo") foo_path = os.path.join(self.repo.path, "foo") with open(foo_path, "w") as f: f.write("BAR") bar_path = os.path.join(self.repo.path, "bar") with open(bar_path, "w") as f: f.write("BAR") self.assertEqual(["foo"], list(porcelain.check_ignore(self.repo, [foo_path]))) self.assertEqual([], list(porcelain.check_ignore(self.repo, [bar_path]))) def test_check_added_abs(self): path = os.path.join(self.repo.path, "foo") with open(path, "w") as f: f.write("BAR") self.repo.stage(["foo"]) with open(os.path.join(self.repo.path, ".gitignore"), "w") as f: f.write("foo\n") self.assertEqual([], list(porcelain.check_ignore(self.repo, [path]))) self.assertEqual( - ["foo"], list(porcelain.check_ignore(self.repo, [path], no_index=True)) + ["foo"], + list(porcelain.check_ignore(self.repo, [path], no_index=True)), ) def test_check_added_rel(self): with open(os.path.join(self.repo.path, "foo"), "w") as f: f.write("BAR") self.repo.stage(["foo"]) with open(os.path.join(self.repo.path, ".gitignore"), "w") as f: f.write("foo\n") cwd = os.getcwd() os.mkdir(os.path.join(self.repo.path, "bar")) os.chdir(os.path.join(self.repo.path, "bar")) try: self.assertEqual(list(porcelain.check_ignore(self.repo, ["../foo"])), []) self.assertEqual( ["../foo"], list(porcelain.check_ignore(self.repo, ["../foo"], no_index=True)), ) finally: os.chdir(cwd) class UpdateHeadTests(PorcelainTestCase): def test_set_to_branch(self): [c1] = build_commit_graph(self.repo.object_store, [[1]]) self.repo.refs[b"refs/heads/blah"] = c1.id porcelain.update_head(self.repo, "blah") self.assertEqual(c1.id, self.repo.head()) self.assertEqual(b"ref: refs/heads/blah", self.repo.refs.read_ref(b"HEAD")) def test_set_to_branch_detached(self): [c1] = build_commit_graph(self.repo.object_store, [[1]]) self.repo.refs[b"refs/heads/blah"] = c1.id porcelain.update_head(self.repo, "blah", detached=True) self.assertEqual(c1.id, self.repo.head()) self.assertEqual(c1.id, self.repo.refs.read_ref(b"HEAD")) def test_set_to_commit_detached(self): [c1] = build_commit_graph(self.repo.object_store, [[1]]) self.repo.refs[b"refs/heads/blah"] = c1.id porcelain.update_head(self.repo, c1.id, detached=True) self.assertEqual(c1.id, 
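# with detached=True, update_head writes the commit sha directly into
# HEAD rather than a "ref: refs/heads/..." symref, i.e. the detached-HEAD
# state git enters when checking out a bare commit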
self.repo.head()) self.assertEqual(c1.id, self.repo.refs.read_ref(b"HEAD")) def test_set_new_branch(self): [c1] = build_commit_graph(self.repo.object_store, [[1]]) self.repo.refs[b"refs/heads/blah"] = c1.id porcelain.update_head(self.repo, "blah", new_branch="bar") self.assertEqual(c1.id, self.repo.head()) self.assertEqual(b"ref: refs/heads/bar", self.repo.refs.read_ref(b"HEAD")) class MailmapTests(PorcelainTestCase): def test_no_mailmap(self): self.assertEqual( b"Jelmer Vernooij ", porcelain.check_mailmap(self.repo, b"Jelmer Vernooij "), ) def test_mailmap_lookup(self): with open(os.path.join(self.repo.path, ".mailmap"), "wb") as f: f.write( b"""\ Jelmer Vernooij """ ) self.assertEqual( b"Jelmer Vernooij ", porcelain.check_mailmap(self.repo, b"Jelmer Vernooij "), ) class FsckTests(PorcelainTestCase): def test_none(self): self.assertEqual([], list(porcelain.fsck(self.repo))) def test_git_dir(self): obj = Tree() a = Blob() a.data = b"foo" obj.add(b".git", 0o100644, a.id) self.repo.object_store.add_objects([(a, None), (obj, None)]) self.assertEqual( [(obj.id, "invalid name .git")], [(sha, str(e)) for (sha, e) in porcelain.fsck(self.repo)], ) class DescribeTests(PorcelainTestCase): def test_no_commits(self): self.assertRaises(KeyError, porcelain.describe, self.repo.path) def test_single_commit(self): fullpath = os.path.join(self.repo.path, "foo") with open(fullpath, "w") as f: f.write("BAR") porcelain.add(repo=self.repo.path, paths=[fullpath]) sha = porcelain.commit( self.repo.path, message=b"Some message", author=b"Joe ", committer=b"Bob ", ) self.assertEqual( - "g{}".format(sha[:7].decode("ascii")), porcelain.describe(self.repo.path) + "g{}".format(sha[:7].decode("ascii")), + porcelain.describe(self.repo.path), ) def test_tag(self): fullpath = os.path.join(self.repo.path, "foo") with open(fullpath, "w") as f: f.write("BAR") porcelain.add(repo=self.repo.path, paths=[fullpath]) porcelain.commit( self.repo.path, message=b"Some message", author=b"Joe ", committer=b"Bob ", ) porcelain.tag_create( - self.repo.path, b"tryme", b"foo ", b"bar", annotated=True + self.repo.path, + b"tryme", + b"foo ", + b"bar", + annotated=True, ) self.assertEqual("tryme", porcelain.describe(self.repo.path)) def test_tag_and_commit(self): fullpath = os.path.join(self.repo.path, "foo") with open(fullpath, "w") as f: f.write("BAR") porcelain.add(repo=self.repo.path, paths=[fullpath]) porcelain.commit( self.repo.path, message=b"Some message", author=b"Joe ", committer=b"Bob ", ) porcelain.tag_create( - self.repo.path, b"tryme", b"foo ", b"bar", annotated=True + self.repo.path, + b"tryme", + b"foo ", + b"bar", + annotated=True, ) with open(fullpath, "w") as f: f.write("BAR2") porcelain.add(repo=self.repo.path, paths=[fullpath]) sha = porcelain.commit( self.repo.path, message=b"Some message", author=b"Joe ", committer=b"Bob ", ) self.assertEqual( "tryme-1-g{}".format(sha[:7].decode("ascii")), porcelain.describe(self.repo.path), ) class PathToTreeTests(PorcelainTestCase): def setUp(self): super(PathToTreeTests, self).setUp() self.fp = os.path.join(self.test_dir, "bar") with open(self.fp, "w") as f: f.write("something") oldcwd = os.getcwd() self.addCleanup(os.chdir, oldcwd) os.chdir(self.test_dir) def test_path_to_tree_path_base(self): self.assertEqual(b"bar", porcelain.path_to_tree_path(self.test_dir, self.fp)) self.assertEqual(b"bar", porcelain.path_to_tree_path(".", "./bar")) self.assertEqual(b"bar", porcelain.path_to_tree_path(".", "bar")) cwd = os.getcwd() self.assertEqual( b"bar", porcelain.path_to_tree_path(".", 
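# path_to_tree_path maps a filesystem path to the repo-relative path
# stored in tree objects: bytes, "/"-separated, with the repo root
# stripped, so every spelling of the same file normalises to b"bar"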
os.path.join(cwd, "bar")) ) self.assertEqual(b"bar", porcelain.path_to_tree_path(cwd, "bar")) def test_path_to_tree_path_syntax(self): self.assertEqual(b"bar", porcelain.path_to_tree_path(".", "./bar")) def test_path_to_tree_path_error(self): with self.assertRaises(ValueError): with tempfile.TemporaryDirectory() as od: porcelain.path_to_tree_path(od, self.fp) def test_path_to_tree_path_rel(self): cwd = os.getcwd() os.mkdir(os.path.join(self.repo.path, "foo")) os.mkdir(os.path.join(self.repo.path, "foo/bar")) try: os.chdir(os.path.join(self.repo.path, "foo/bar")) with open("baz", "w") as f: f.write("contents") self.assertEqual(b"bar/baz", porcelain.path_to_tree_path("..", "baz")) self.assertEqual( b"bar/baz", porcelain.path_to_tree_path( - os.path.join(os.getcwd(), ".."), os.path.join(os.getcwd(), "baz") + os.path.join(os.getcwd(), ".."), + os.path.join(os.getcwd(), "baz"), ), ) self.assertEqual( b"bar/baz", porcelain.path_to_tree_path("..", os.path.join(os.getcwd(), "baz")), ) self.assertEqual( b"bar/baz", porcelain.path_to_tree_path(os.path.join(os.getcwd(), ".."), "baz"), ) finally: os.chdir(cwd) class GetObjectByPathTests(PorcelainTestCase): def test_simple(self): fullpath = os.path.join(self.repo.path, "foo") with open(fullpath, "w") as f: f.write("BAR") porcelain.add(repo=self.repo.path, paths=[fullpath]) porcelain.commit( self.repo.path, message=b"Some message", author=b"Joe ", committer=b"Bob ", ) self.assertEqual(b"BAR", porcelain.get_object_by_path(self.repo, "foo").data) self.assertEqual(b"BAR", porcelain.get_object_by_path(self.repo, b"foo").data) def test_encoding(self): fullpath = os.path.join(self.repo.path, "foo") with open(fullpath, "w") as f: f.write("BAR") porcelain.add(repo=self.repo.path, paths=[fullpath]) porcelain.commit( self.repo.path, message=b"Some message", author=b"Joe ", committer=b"Bob ", encoding=b"utf-8", ) self.assertEqual(b"BAR", porcelain.get_object_by_path(self.repo, "foo").data) self.assertEqual(b"BAR", porcelain.get_object_by_path(self.repo, b"foo").data) def test_missing(self): self.assertRaises(KeyError, porcelain.get_object_by_path, self.repo, "foo") class WriteTreeTests(PorcelainTestCase): def test_simple(self): fullpath = os.path.join(self.repo.path, "foo") with open(fullpath, "w") as f: f.write("BAR") porcelain.add(repo=self.repo.path, paths=[fullpath]) self.assertEqual( - b"d2092c8a9f311f0311083bf8d177f2ca0ab5b241", porcelain.write_tree(self.repo) + b"d2092c8a9f311f0311083bf8d177f2ca0ab5b241", + porcelain.write_tree(self.repo), ) class ActiveBranchTests(PorcelainTestCase): def test_simple(self): self.assertEqual(b"master", porcelain.active_branch(self.repo)) diff --git a/dulwich/tests/test_protocol.py b/dulwich/tests/test_protocol.py index 37f7d5dc..9985fa4a 100644 --- a/dulwich/tests/test_protocol.py +++ b/dulwich/tests/test_protocol.py @@ -1,320 +1,323 @@ # test_protocol.py -- Tests for the git protocol # Copyright (C) 2009 Jelmer Vernooij # # Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU # General Public License as public by the Free Software Foundation; version 2.0 # or (at your option) any later version. You can redistribute it and/or # modify it under the terms of either of these two licenses. # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
# # You should have received a copy of the licenses; if not, see # for a copy of the GNU General Public License # and for a copy of the Apache # License, Version 2.0. # """Tests for the smart protocol utility functions.""" from io import BytesIO from dulwich.errors import ( HangupException, ) from dulwich.protocol import ( GitProtocolError, PktLineParser, Protocol, ReceivableProtocol, extract_capabilities, extract_want_line_capabilities, ack_type, SINGLE_ACK, MULTI_ACK, MULTI_ACK_DETAILED, BufferedPktLineWriter, ) from dulwich.tests import TestCase class BaseProtocolTests(object): def test_write_pkt_line_none(self): self.proto.write_pkt_line(None) self.assertEqual(self.rout.getvalue(), b"0000") def test_write_pkt_line(self): self.proto.write_pkt_line(b"bla") self.assertEqual(self.rout.getvalue(), b"0007bla") def test_read_pkt_line(self): self.rin.write(b"0008cmd ") self.rin.seek(0) self.assertEqual(b"cmd ", self.proto.read_pkt_line()) def test_eof(self): self.rin.write(b"0000") self.rin.seek(0) self.assertFalse(self.proto.eof()) self.assertEqual(None, self.proto.read_pkt_line()) self.assertTrue(self.proto.eof()) self.assertRaises(HangupException, self.proto.read_pkt_line) def test_unread_pkt_line(self): self.rin.write(b"0007foo0000") self.rin.seek(0) self.assertEqual(b"foo", self.proto.read_pkt_line()) self.proto.unread_pkt_line(b"bar") self.assertEqual(b"bar", self.proto.read_pkt_line()) self.assertEqual(None, self.proto.read_pkt_line()) self.proto.unread_pkt_line(b"baz1") self.assertRaises(ValueError, self.proto.unread_pkt_line, b"baz2") def test_read_pkt_seq(self): self.rin.write(b"0008cmd 0005l0000") self.rin.seek(0) self.assertEqual([b"cmd ", b"l"], list(self.proto.read_pkt_seq())) def test_read_pkt_line_none(self): self.rin.write(b"0000") self.rin.seek(0) self.assertEqual(None, self.proto.read_pkt_line()) def test_read_pkt_line_wrong_size(self): self.rin.write(b"0100too short") self.rin.seek(0) self.assertRaises(GitProtocolError, self.proto.read_pkt_line) def test_write_sideband(self): self.proto.write_sideband(3, b"bloe") self.assertEqual(self.rout.getvalue(), b"0009\x03bloe") def test_send_cmd(self): self.proto.send_cmd(b"fetch", b"a", b"b") self.assertEqual(self.rout.getvalue(), b"000efetch a\x00b\x00") def test_read_cmd(self): self.rin.write(b"0012cmd arg1\x00arg2\x00") self.rin.seek(0) self.assertEqual((b"cmd", [b"arg1", b"arg2"]), self.proto.read_cmd()) def test_read_cmd_noend0(self): self.rin.write(b"0011cmd arg1\x00arg2") self.rin.seek(0) self.assertRaises(AssertionError, self.proto.read_cmd) class ProtocolTests(BaseProtocolTests, TestCase): def setUp(self): TestCase.setUp(self) self.rout = BytesIO() self.rin = BytesIO() self.proto = Protocol(self.rin.read, self.rout.write) class ReceivableBytesIO(BytesIO): """BytesIO with socket-like recv semantics for testing.""" def __init__(self): BytesIO.__init__(self) self.allow_read_past_eof = False def recv(self, size): # fail fast if no bytes are available; in a real socket, this would # block forever if self.tell() == len(self.getvalue()) and not self.allow_read_past_eof: raise GitProtocolError("Blocking read past end of socket") if size == 1: return self.read(1) # calls shouldn't return quite as much as asked for return self.read(size - 1) class ReceivableProtocolTests(BaseProtocolTests, TestCase): def setUp(self): TestCase.setUp(self) self.rout = BytesIO() self.rin = ReceivableBytesIO() self.proto = ReceivableProtocol(self.rin.recv, self.rout.write) self.proto._rbufsize = 8 def test_eof(self): # Allow blocking reads past EOF just 
for this test. The only parts of # the protocol that might check for EOF do not depend on the recv() # semantics anyway. self.rin.allow_read_past_eof = True BaseProtocolTests.test_eof(self) def test_recv(self): all_data = b"1234567" * 10 # not a multiple of bufsize self.rin.write(all_data) self.rin.seek(0) data = b"" # We ask for 8 bytes each time and actually read 7, so it should take # exactly 10 iterations. for _ in range(10): data += self.proto.recv(10) # any more reads would block self.assertRaises(GitProtocolError, self.proto.recv, 10) self.assertEqual(all_data, data) def test_recv_read(self): all_data = b"1234567" # recv exactly in one call self.rin.write(all_data) self.rin.seek(0) self.assertEqual(b"1234", self.proto.recv(4)) self.assertEqual(b"567", self.proto.read(3)) self.assertRaises(GitProtocolError, self.proto.recv, 10) def test_read_recv(self): all_data = b"12345678abcdefg" self.rin.write(all_data) self.rin.seek(0) self.assertEqual(b"1234", self.proto.read(4)) self.assertEqual(b"5678abc", self.proto.recv(8)) self.assertEqual(b"defg", self.proto.read(4)) self.assertRaises(GitProtocolError, self.proto.recv, 10) def test_mixed(self): # arbitrary non-repeating string all_data = b",".join(str(i).encode("ascii") for i in range(100)) self.rin.write(all_data) self.rin.seek(0) data = b"" for i in range(1, 100): data += self.proto.recv(i) # if we get to the end, do a non-blocking read instead of blocking if len(data) + i > len(all_data): data += self.proto.recv(i) # ReceivableBytesIO leaves off the last byte unless we ask # nicely data += self.proto.recv(1) break else: data += self.proto.read(i) else: # didn't break, something must have gone wrong self.fail() self.assertEqual(all_data, data) class CapabilitiesTestCase(TestCase): def test_plain(self): self.assertEqual((b"bla", []), extract_capabilities(b"bla")) def test_caps(self): self.assertEqual((b"bla", [b"la"]), extract_capabilities(b"bla\0la")) self.assertEqual((b"bla", [b"la"]), extract_capabilities(b"bla\0la\n")) self.assertEqual((b"bla", [b"la", b"la"]), extract_capabilities(b"bla\0la la")) def test_plain_want_line(self): self.assertEqual((b"want bla", []), extract_want_line_capabilities(b"want bla")) def test_caps_want_line(self): self.assertEqual( - (b"want bla", [b"la"]), extract_want_line_capabilities(b"want bla la") + (b"want bla", [b"la"]), + extract_want_line_capabilities(b"want bla la"), ) self.assertEqual( - (b"want bla", [b"la"]), extract_want_line_capabilities(b"want bla la\n") + (b"want bla", [b"la"]), + extract_want_line_capabilities(b"want bla la\n"), ) self.assertEqual( (b"want bla", [b"la", b"la"]), extract_want_line_capabilities(b"want bla la la"), ) def test_ack_type(self): self.assertEqual(SINGLE_ACK, ack_type([b"foo", b"bar"])) self.assertEqual(MULTI_ACK, ack_type([b"foo", b"bar", b"multi_ack"])) self.assertEqual( - MULTI_ACK_DETAILED, ack_type([b"foo", b"bar", b"multi_ack_detailed"]) + MULTI_ACK_DETAILED, + ack_type([b"foo", b"bar", b"multi_ack_detailed"]), ) # choose detailed when both present self.assertEqual( MULTI_ACK_DETAILED, ack_type([b"foo", b"bar", b"multi_ack", b"multi_ack_detailed"]), ) class BufferedPktLineWriterTests(TestCase): def setUp(self): TestCase.setUp(self) self._output = BytesIO() self._writer = BufferedPktLineWriter(self._output.write, bufsize=16) def assertOutputEquals(self, expected): self.assertEqual(expected, self._output.getvalue()) def _truncate(self): self._output.seek(0) self._output.truncate() def test_write(self): self._writer.write(b"foo") self.assertOutputEquals(b"") 
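# BufferedPktLineWriter accumulates encoded pkt-lines internally and
# only calls the underlying write function once bufsize (16 bytes here)
# is exceeded or flush() is called, hence no output yet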
self._writer.flush() self.assertOutputEquals(b"0007foo") def test_write_none(self): self._writer.write(None) self.assertOutputEquals(b"") self._writer.flush() self.assertOutputEquals(b"0000") def test_flush_empty(self): self._writer.flush() self.assertOutputEquals(b"") def test_write_multiple(self): self._writer.write(b"foo") self._writer.write(b"bar") self.assertOutputEquals(b"") self._writer.flush() self.assertOutputEquals(b"0007foo0007bar") def test_write_across_boundary(self): self._writer.write(b"foo") self._writer.write(b"barbaz") self.assertOutputEquals(b"0007foo000abarba") self._truncate() self._writer.flush() self.assertOutputEquals(b"z") def test_write_to_boundary(self): self._writer.write(b"foo") self._writer.write(b"barba") self.assertOutputEquals(b"0007foo0009barba") self._truncate() self._writer.write(b"z") self._writer.flush() self.assertOutputEquals(b"0005z") class PktLineParserTests(TestCase): def test_none(self): pktlines = [] parser = PktLineParser(pktlines.append) parser.parse(b"0000") self.assertEqual(pktlines, [None]) self.assertEqual(b"", parser.get_tail()) def test_small_fragments(self): pktlines = [] parser = PktLineParser(pktlines.append) parser.parse(b"00") parser.parse(b"05") parser.parse(b"z0000") self.assertEqual(pktlines, [b"z", None]) self.assertEqual(b"", parser.get_tail()) def test_multiple_packets(self): pktlines = [] parser = PktLineParser(pktlines.append) parser.parse(b"0005z0006aba") self.assertEqual(pktlines, [b"z", b"ab"]) self.assertEqual(b"a", parser.get_tail()) diff --git a/dulwich/tests/test_refs.py b/dulwich/tests/test_refs.py index 059e3aa1..73c69341 100644 --- a/dulwich/tests/test_refs.py +++ b/dulwich/tests/test_refs.py @@ -1,761 +1,780 @@ # test_refs.py -- tests for refs.py # encoding: utf-8 # Copyright (C) 2013 Jelmer Vernooij # # Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU # General Public License as public by the Free Software Foundation; version 2.0 # or (at your option) any later version. You can redistribute it and/or # modify it under the terms of either of these two licenses. # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # You should have received a copy of the licenses; if not, see # for a copy of the GNU General Public License # and for a copy of the Apache # License, Version 2.0. # """Tests for dulwich.refs.""" from io import BytesIO import os import sys import tempfile from dulwich import errors from dulwich.file import ( GitFile, ) from dulwich.objects import ZERO_SHA from dulwich.refs import ( DictRefsContainer, InfoRefsContainer, check_ref_format, _split_ref_line, parse_symref_value, read_packed_refs_with_peeled, read_packed_refs, strip_peeled_refs, write_packed_refs, ) from dulwich.repo import Repo from dulwich.tests import ( SkipTest, TestCase, ) from dulwich.tests.utils import ( open_repo, tear_down_repo, ) class CheckRefFormatTests(TestCase): """Tests for the check_ref_format function. These are the same tests as in the git test suite. 
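In short: a valid name contains at least one "/", no component may
start with "." or end in ".lock", a trailing "/" is forbidden, and
sequences such as "..", "@{", "?", "\" and control characters are
rejected.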
""" def test_valid(self): self.assertTrue(check_ref_format(b"heads/foo")) self.assertTrue(check_ref_format(b"foo/bar/baz")) self.assertTrue(check_ref_format(b"refs///heads/foo")) self.assertTrue(check_ref_format(b"foo./bar")) self.assertTrue(check_ref_format(b"heads/foo@bar")) self.assertTrue(check_ref_format(b"heads/fix.lock.error")) def test_invalid(self): self.assertFalse(check_ref_format(b"foo")) self.assertFalse(check_ref_format(b"heads/foo/")) self.assertFalse(check_ref_format(b"./foo")) self.assertFalse(check_ref_format(b".refs/foo")) self.assertFalse(check_ref_format(b"heads/foo..bar")) self.assertFalse(check_ref_format(b"heads/foo?bar")) self.assertFalse(check_ref_format(b"heads/foo.lock")) self.assertFalse(check_ref_format(b"heads/v@{ation")) self.assertFalse(check_ref_format(b"heads/foo\bar")) ONES = b"1" * 40 TWOS = b"2" * 40 THREES = b"3" * 40 FOURS = b"4" * 40 class PackedRefsFileTests(TestCase): def test_split_ref_line_errors(self): self.assertRaises(errors.PackedRefsException, _split_ref_line, b"singlefield") self.assertRaises(errors.PackedRefsException, _split_ref_line, b"badsha name") self.assertRaises( - errors.PackedRefsException, _split_ref_line, ONES + b" bad/../refname" + errors.PackedRefsException, + _split_ref_line, + ONES + b" bad/../refname", ) def test_read_without_peeled(self): f = BytesIO(b"\n".join([b"# comment", ONES + b" ref/1", TWOS + b" ref/2"])) self.assertEqual( [(ONES, b"ref/1"), (TWOS, b"ref/2")], list(read_packed_refs(f)) ) def test_read_without_peeled_errors(self): f = BytesIO(b"\n".join([ONES + b" ref/1", b"^" + TWOS])) self.assertRaises(errors.PackedRefsException, list, read_packed_refs(f)) def test_read_with_peeled(self): f = BytesIO( b"\n".join( - [ONES + b" ref/1", TWOS + b" ref/2", b"^" + THREES, FOURS + b" ref/4"] + [ + ONES + b" ref/1", + TWOS + b" ref/2", + b"^" + THREES, + FOURS + b" ref/4", + ] ) ) self.assertEqual( [ (ONES, b"ref/1", None), (TWOS, b"ref/2", THREES), (FOURS, b"ref/4", None), ], list(read_packed_refs_with_peeled(f)), ) def test_read_with_peeled_errors(self): f = BytesIO(b"\n".join([b"^" + TWOS, ONES + b" ref/1"])) self.assertRaises(errors.PackedRefsException, list, read_packed_refs(f)) f = BytesIO(b"\n".join([ONES + b" ref/1", b"^" + TWOS, b"^" + THREES])) self.assertRaises(errors.PackedRefsException, list, read_packed_refs(f)) def test_write_with_peeled(self): f = BytesIO() write_packed_refs(f, {b"ref/1": ONES, b"ref/2": TWOS}, {b"ref/1": THREES}) self.assertEqual( b"\n".join( [ b"# pack-refs with: peeled", ONES + b" ref/1", b"^" + THREES, TWOS + b" ref/2", ] ) + b"\n", f.getvalue(), ) def test_write_without_peeled(self): f = BytesIO() write_packed_refs(f, {b"ref/1": ONES, b"ref/2": TWOS}) self.assertEqual( - b"\n".join([ONES + b" ref/1", TWOS + b" ref/2"]) + b"\n", f.getvalue() + b"\n".join([ONES + b" ref/1", TWOS + b" ref/2"]) + b"\n", + f.getvalue(), ) # Dict of refs that we expect all RefsContainerTests subclasses to define. 
_TEST_REFS = { b"HEAD": b"42d06bd4b77fed026b154d16493e5deab78f02ec", b"refs/heads/40-char-ref-aaaaaaaaaaaaaaaaaa": b"42d06bd4b77fed026b154d16493e5deab78f02ec", b"refs/heads/master": b"42d06bd4b77fed026b154d16493e5deab78f02ec", b"refs/heads/packed": b"42d06bd4b77fed026b154d16493e5deab78f02ec", b"refs/tags/refs-0.1": b"df6800012397fb85c56e7418dd4eb9405dee075c", b"refs/tags/refs-0.2": b"3ec9c43c84ff242e3ef4a9fc5bc111fd780a76a8", b"refs/heads/loop": b"ref: refs/heads/loop", } class RefsContainerTests(object): def test_keys(self): actual_keys = set(self._refs.keys()) self.assertEqual(set(self._refs.allkeys()), actual_keys) self.assertEqual(set(_TEST_REFS.keys()), actual_keys) actual_keys = self._refs.keys(b"refs/heads") actual_keys.discard(b"loop") self.assertEqual( [b"40-char-ref-aaaaaaaaaaaaaaaaaa", b"master", b"packed"], sorted(actual_keys), ) self.assertEqual( [b"refs-0.1", b"refs-0.2"], sorted(self._refs.keys(b"refs/tags")) ) def test_iter(self): actual_keys = set(self._refs.keys()) self.assertEqual(set(self._refs), actual_keys) self.assertEqual(set(_TEST_REFS.keys()), actual_keys) def test_as_dict(self): # refs/heads/loop does not show up even if it exists expected_refs = dict(_TEST_REFS) del expected_refs[b"refs/heads/loop"] self.assertEqual(expected_refs, self._refs.as_dict()) def test_get_symrefs(self): self._refs.set_symbolic_ref(b"refs/heads/src", b"refs/heads/dst") symrefs = self._refs.get_symrefs() if b"HEAD" in symrefs: symrefs.pop(b"HEAD") self.assertEqual( { b"refs/heads/src": b"refs/heads/dst", b"refs/heads/loop": b"refs/heads/loop", }, symrefs, ) def test_setitem(self): self._refs[b"refs/some/ref"] = b"42d06bd4b77fed026b154d16493e5deab78f02ec" self.assertEqual( - b"42d06bd4b77fed026b154d16493e5deab78f02ec", self._refs[b"refs/some/ref"] + b"42d06bd4b77fed026b154d16493e5deab78f02ec", + self._refs[b"refs/some/ref"], ) self.assertRaises( errors.RefFormatError, self._refs.__setitem__, b"notrefs/foo", b"42d06bd4b77fed026b154d16493e5deab78f02ec", ) def test_set_if_equals(self): nines = b"9" * 40 self.assertFalse(self._refs.set_if_equals(b"HEAD", b"c0ffee", nines)) self.assertEqual( b"42d06bd4b77fed026b154d16493e5deab78f02ec", self._refs[b"HEAD"] ) self.assertTrue( self._refs.set_if_equals( b"HEAD", b"42d06bd4b77fed026b154d16493e5deab78f02ec", nines ) ) self.assertEqual(nines, self._refs[b"HEAD"]) # Setting the ref again is a no-op, but will return True. 
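# (set_if_equals is a compare-and-swap: the write only happens when the
# ref's current value matches the expected old value, with None meaning
# "any value" and ZERO_SHA standing in for "ref does not exist")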
self.assertTrue(self._refs.set_if_equals(b"HEAD", nines, nines)) self.assertEqual(nines, self._refs[b"HEAD"]) self.assertTrue(self._refs.set_if_equals(b"refs/heads/master", None, nines)) self.assertEqual(nines, self._refs[b"refs/heads/master"]) self.assertTrue( self._refs.set_if_equals(b"refs/heads/nonexistant", ZERO_SHA, nines) ) self.assertEqual(nines, self._refs[b"refs/heads/nonexistant"]) def test_add_if_new(self): nines = b"9" * 40 self.assertFalse(self._refs.add_if_new(b"refs/heads/master", nines)) self.assertEqual( b"42d06bd4b77fed026b154d16493e5deab78f02ec", self._refs[b"refs/heads/master"], ) self.assertTrue(self._refs.add_if_new(b"refs/some/ref", nines)) self.assertEqual(nines, self._refs[b"refs/some/ref"]) def test_set_symbolic_ref(self): self._refs.set_symbolic_ref(b"refs/heads/symbolic", b"refs/heads/master") self.assertEqual( - b"ref: refs/heads/master", self._refs.read_loose_ref(b"refs/heads/symbolic") + b"ref: refs/heads/master", + self._refs.read_loose_ref(b"refs/heads/symbolic"), ) self.assertEqual( b"42d06bd4b77fed026b154d16493e5deab78f02ec", self._refs[b"refs/heads/symbolic"], ) def test_set_symbolic_ref_overwrite(self): nines = b"9" * 40 self.assertFalse(b"refs/heads/symbolic" in self._refs) self._refs[b"refs/heads/symbolic"] = nines self.assertEqual(nines, self._refs.read_loose_ref(b"refs/heads/symbolic")) self._refs.set_symbolic_ref(b"refs/heads/symbolic", b"refs/heads/master") self.assertEqual( - b"ref: refs/heads/master", self._refs.read_loose_ref(b"refs/heads/symbolic") + b"ref: refs/heads/master", + self._refs.read_loose_ref(b"refs/heads/symbolic"), ) self.assertEqual( b"42d06bd4b77fed026b154d16493e5deab78f02ec", self._refs[b"refs/heads/symbolic"], ) def test_check_refname(self): self._refs._check_refname(b"HEAD") self._refs._check_refname(b"refs/stash") self._refs._check_refname(b"refs/heads/foo") self.assertRaises(errors.RefFormatError, self._refs._check_refname, b"refs") self.assertRaises( errors.RefFormatError, self._refs._check_refname, b"notrefs/foo" ) def test_contains(self): self.assertTrue(b"refs/heads/master" in self._refs) self.assertFalse(b"refs/heads/bar" in self._refs) def test_delitem(self): self.assertEqual( b"42d06bd4b77fed026b154d16493e5deab78f02ec", self._refs[b"refs/heads/master"], ) del self._refs[b"refs/heads/master"] self.assertRaises(KeyError, lambda: self._refs[b"refs/heads/master"]) def test_remove_if_equals(self): self.assertFalse(self._refs.remove_if_equals(b"HEAD", b"c0ffee")) self.assertEqual( b"42d06bd4b77fed026b154d16493e5deab78f02ec", self._refs[b"HEAD"] ) self.assertTrue( self._refs.remove_if_equals( - b"refs/tags/refs-0.2", b"3ec9c43c84ff242e3ef4a9fc5bc111fd780a76a8" + b"refs/tags/refs-0.2", + b"3ec9c43c84ff242e3ef4a9fc5bc111fd780a76a8", ) ) self.assertTrue(self._refs.remove_if_equals(b"refs/tags/refs-0.2", ZERO_SHA)) self.assertFalse(b"refs/tags/refs-0.2" in self._refs) def test_import_refs_name(self): self._refs[ b"refs/remotes/origin/other" ] = b"48d01bd4b77fed026b154d16493e5deab78f02ec" self._refs.import_refs( b"refs/remotes/origin", {b"master": b"42d06bd4b77fed026b154d16493e5deab78f02ec"}, ) self.assertEqual( b"42d06bd4b77fed026b154d16493e5deab78f02ec", self._refs[b"refs/remotes/origin/master"], ) self.assertEqual( b"48d01bd4b77fed026b154d16493e5deab78f02ec", self._refs[b"refs/remotes/origin/other"], ) def test_import_refs_name_prune(self): self._refs[ b"refs/remotes/origin/other" ] = b"48d01bd4b77fed026b154d16493e5deab78f02ec" self._refs.import_refs( b"refs/remotes/origin", {b"master": 
b"42d06bd4b77fed026b154d16493e5deab78f02ec"}, prune=True, ) self.assertEqual( b"42d06bd4b77fed026b154d16493e5deab78f02ec", self._refs[b"refs/remotes/origin/master"], ) self.assertNotIn(b"refs/remotes/origin/other", self._refs) def test_watch(self): try: watcher = self._refs.watch() except (NotImplementedError, ImportError): self.skipTest("watching not supported") with watcher: self._refs[ b"refs/remotes/origin/other" ] = b"48d01bd4b77fed026b154d16493e5deab78f02ec" change = next(watcher) self.assertEqual( ( b"refs/remotes/origin/other", b"48d01bd4b77fed026b154d16493e5deab78f02ec", ), change, ) self._refs[ b"refs/remotes/origin/other" ] = b"48d01bd4b77fed026b154d16493e5deab78f02ed" change = next(watcher) self.assertEqual( ( b"refs/remotes/origin/other", b"48d01bd4b77fed026b154d16493e5deab78f02ed", ), change, ) del self._refs[b"refs/remotes/origin/other"] change = next(watcher) self.assertEqual((b"refs/remotes/origin/other", None), change) class DictRefsContainerTests(RefsContainerTests, TestCase): def setUp(self): TestCase.setUp(self) self._refs = DictRefsContainer(dict(_TEST_REFS)) def test_invalid_refname(self): # FIXME: Move this test into RefsContainerTests, but requires # some way of injecting invalid refs. self._refs._refs[b"refs/stash"] = b"00" * 20 expected_refs = dict(_TEST_REFS) del expected_refs[b"refs/heads/loop"] expected_refs[b"refs/stash"] = b"00" * 20 self.assertEqual(expected_refs, self._refs.as_dict()) class DiskRefsContainerTests(RefsContainerTests, TestCase): def setUp(self): TestCase.setUp(self) self._repo = open_repo("refs.git") self.addCleanup(tear_down_repo, self._repo) self._refs = self._repo.refs def test_get_packed_refs(self): self.assertEqual( { b"refs/heads/packed": b"42d06bd4b77fed026b154d16493e5deab78f02ec", b"refs/tags/refs-0.1": b"df6800012397fb85c56e7418dd4eb9405dee075c", }, self._refs.get_packed_refs(), ) def test_get_peeled_not_packed(self): # not packed self.assertEqual(None, self._refs.get_peeled(b"refs/tags/refs-0.2")) self.assertEqual( b"3ec9c43c84ff242e3ef4a9fc5bc111fd780a76a8", self._refs[b"refs/tags/refs-0.2"], ) # packed, known not peelable self.assertEqual( self._refs[b"refs/heads/packed"], self._refs.get_peeled(b"refs/heads/packed"), ) # packed, peeled self.assertEqual( b"42d06bd4b77fed026b154d16493e5deab78f02ec", self._refs.get_peeled(b"refs/tags/refs-0.1"), ) def test_setitem(self): RefsContainerTests.test_setitem(self) path = os.path.join(self._refs.path, b"refs", b"some", b"ref") with open(path, "rb") as f: self.assertEqual(b"42d06bd4b77fed026b154d16493e5deab78f02ec", f.read()[:40]) self.assertRaises( OSError, self._refs.__setitem__, b"refs/some/ref/sub", b"42d06bd4b77fed026b154d16493e5deab78f02ec", ) def test_setitem_packed(self): with open(os.path.join(self._refs.path, b"packed-refs"), "w") as f: f.write("# pack-refs with: peeled fully-peeled sorted \n") f.write("42d06bd4b77fed026b154d16493e5deab78f02ec refs/heads/packed\n") # It's allowed to set a new ref on a packed ref, the new ref will be # placed outside on refs/ self._refs[b"refs/heads/packed"] = b"3ec9c43c84ff242e3ef4a9fc5bc111fd780a76a8" packed_ref_path = os.path.join(self._refs.path, b"refs", b"heads", b"packed") with open(packed_ref_path, "rb") as f: self.assertEqual(b"3ec9c43c84ff242e3ef4a9fc5bc111fd780a76a8", f.read()[:40]) self.assertRaises( OSError, self._refs.__setitem__, b"refs/heads/packed/sub", b"42d06bd4b77fed026b154d16493e5deab78f02ec", ) def test_setitem_symbolic(self): ones = b"1" * 40 self._refs[b"HEAD"] = ones self.assertEqual(ones, self._refs[b"HEAD"]) # ensure HEAD 
was not modified f = open(os.path.join(self._refs.path, b"HEAD"), "rb") v = next(iter(f)).rstrip(b"\n\r") f.close() self.assertEqual(b"ref: refs/heads/master", v) # ensure the symbolic link was written through f = open(os.path.join(self._refs.path, b"refs", b"heads", b"master"), "rb") self.assertEqual(ones, f.read()[:40]) f.close() def test_set_if_equals(self): RefsContainerTests.test_set_if_equals(self) # ensure symref was followed self.assertEqual(b"9" * 40, self._refs[b"refs/heads/master"]) # ensure lockfile was deleted self.assertFalse( os.path.exists( os.path.join(self._refs.path, b"refs", b"heads", b"master.lock") ) ) self.assertFalse(os.path.exists(os.path.join(self._refs.path, b"HEAD.lock"))) def test_add_if_new_packed(self): # don't overwrite packed ref self.assertFalse(self._refs.add_if_new(b"refs/tags/refs-0.1", b"9" * 40)) self.assertEqual( b"df6800012397fb85c56e7418dd4eb9405dee075c", self._refs[b"refs/tags/refs-0.1"], ) def test_add_if_new_symbolic(self): # Use an empty repo instead of the default. repo_dir = os.path.join(tempfile.mkdtemp(), "test") os.makedirs(repo_dir) repo = Repo.init(repo_dir) self.addCleanup(tear_down_repo, repo) refs = repo.refs nines = b"9" * 40 self.assertEqual(b"ref: refs/heads/master", refs.read_ref(b"HEAD")) self.assertFalse(b"refs/heads/master" in refs) self.assertTrue(refs.add_if_new(b"HEAD", nines)) self.assertEqual(b"ref: refs/heads/master", refs.read_ref(b"HEAD")) self.assertEqual(nines, refs[b"HEAD"]) self.assertEqual(nines, refs[b"refs/heads/master"]) self.assertFalse(refs.add_if_new(b"HEAD", b"1" * 40)) self.assertEqual(nines, refs[b"HEAD"]) self.assertEqual(nines, refs[b"refs/heads/master"]) def test_follow(self): self.assertEqual( ( [b"HEAD", b"refs/heads/master"], b"42d06bd4b77fed026b154d16493e5deab78f02ec", ), self._refs.follow(b"HEAD"), ) self.assertEqual( - ([b"refs/heads/master"], b"42d06bd4b77fed026b154d16493e5deab78f02ec"), + ( + [b"refs/heads/master"], + b"42d06bd4b77fed026b154d16493e5deab78f02ec", + ), self._refs.follow(b"refs/heads/master"), ) self.assertRaises(KeyError, self._refs.follow, b"refs/heads/loop") def test_delitem(self): RefsContainerTests.test_delitem(self) ref_file = os.path.join(self._refs.path, b"refs", b"heads", b"master") self.assertFalse(os.path.exists(ref_file)) self.assertFalse(b"refs/heads/master" in self._refs.get_packed_refs()) def test_delitem_symbolic(self): self.assertEqual(b"ref: refs/heads/master", self._refs.read_loose_ref(b"HEAD")) del self._refs[b"HEAD"] self.assertRaises(KeyError, lambda: self._refs[b"HEAD"]) self.assertEqual( b"42d06bd4b77fed026b154d16493e5deab78f02ec", self._refs[b"refs/heads/master"], ) self.assertFalse(os.path.exists(os.path.join(self._refs.path, b"HEAD"))) def test_remove_if_equals_symref(self): # HEAD is a symref, so shouldn't equal its dereferenced value self.assertFalse( self._refs.remove_if_equals( b"HEAD", b"42d06bd4b77fed026b154d16493e5deab78f02ec" ) ) self.assertTrue( self._refs.remove_if_equals( - b"refs/heads/master", b"42d06bd4b77fed026b154d16493e5deab78f02ec" + b"refs/heads/master", + b"42d06bd4b77fed026b154d16493e5deab78f02ec", ) ) self.assertRaises(KeyError, lambda: self._refs[b"refs/heads/master"]) # HEAD is now a broken symref self.assertRaises(KeyError, lambda: self._refs[b"HEAD"]) self.assertEqual(b"ref: refs/heads/master", self._refs.read_loose_ref(b"HEAD")) self.assertFalse( os.path.exists( os.path.join(self._refs.path, b"refs", b"heads", b"master.lock") ) ) self.assertFalse(os.path.exists(os.path.join(self._refs.path, b"HEAD.lock"))) def 
test_remove_packed_without_peeled(self): refs_file = os.path.join(self._repo.path, "packed-refs") f = GitFile(refs_file) refs_data = f.read() f.close() f = GitFile(refs_file, "wb") f.write( b"\n".join( line for line in refs_data.split(b"\n") if not line or line[0] not in b"#^" ) ) f.close() self._repo = Repo(self._repo.path) refs = self._repo.refs self.assertTrue( refs.remove_if_equals( - b"refs/heads/packed", b"42d06bd4b77fed026b154d16493e5deab78f02ec" + b"refs/heads/packed", + b"42d06bd4b77fed026b154d16493e5deab78f02ec", ) ) def test_remove_if_equals_packed(self): # test removing ref that is only packed self.assertEqual( b"df6800012397fb85c56e7418dd4eb9405dee075c", self._refs[b"refs/tags/refs-0.1"], ) self.assertTrue( self._refs.remove_if_equals( - b"refs/tags/refs-0.1", b"df6800012397fb85c56e7418dd4eb9405dee075c" + b"refs/tags/refs-0.1", + b"df6800012397fb85c56e7418dd4eb9405dee075c", ) ) self.assertRaises(KeyError, lambda: self._refs[b"refs/tags/refs-0.1"]) def test_remove_parent(self): self._refs[b"refs/heads/foo/bar"] = b"df6800012397fb85c56e7418dd4eb9405dee075c" del self._refs[b"refs/heads/foo/bar"] ref_file = os.path.join( self._refs.path, b"refs", b"heads", b"foo", b"bar", ) self.assertFalse(os.path.exists(ref_file)) ref_file = os.path.join(self._refs.path, b"refs", b"heads", b"foo") self.assertFalse(os.path.exists(ref_file)) ref_file = os.path.join(self._refs.path, b"refs", b"heads") self.assertTrue(os.path.exists(ref_file)) self._refs[b"refs/heads/foo"] = b"df6800012397fb85c56e7418dd4eb9405dee075c" def test_read_ref(self): self.assertEqual(b"ref: refs/heads/master", self._refs.read_ref(b"HEAD")) self.assertEqual( b"42d06bd4b77fed026b154d16493e5deab78f02ec", self._refs.read_ref(b"refs/heads/packed"), ) self.assertEqual(None, self._refs.read_ref(b"nonexistant")) def test_read_loose_ref(self): self._refs[b"refs/heads/foo"] = b"df6800012397fb85c56e7418dd4eb9405dee075c" self.assertEqual(None, self._refs.read_ref(b"refs/heads/foo/bar")) def test_non_ascii(self): try: encoded_ref = os.fsencode(u"refs/tags/schön") except UnicodeEncodeError: raise SkipTest("filesystem encoding doesn't support special character") p = os.path.join(os.fsencode(self._repo.path), encoded_ref) with open(p, "w") as f: f.write("00" * 20) expected_refs = dict(_TEST_REFS) expected_refs[encoded_ref] = b"00" * 20 del expected_refs[b"refs/heads/loop"] self.assertEqual(expected_refs, self._repo.get_refs()) def test_cyrillic(self): if sys.platform in ("darwin", "win32"): raise SkipTest("filesystem encoding doesn't support arbitrary bytes") # reported in https://github.com/dulwich/dulwich/issues/608 name = b"\xcd\xee\xe2\xe0\xff\xe2\xe5\xf2\xea\xe01" encoded_ref = b"refs/heads/" + name with open(os.path.join(os.fsencode(self._repo.path), encoded_ref), "w") as f: f.write("00" * 20) expected_refs = set(_TEST_REFS.keys()) expected_refs.add(encoded_ref) self.assertEqual(expected_refs, set(self._repo.refs.allkeys())) self.assertEqual( {r[len(b"refs/") :] for r in expected_refs if r.startswith(b"refs/")}, set(self._repo.refs.subkeys(b"refs/")), ) expected_refs.remove(b"refs/heads/loop") expected_refs.add(b"HEAD") self.assertEqual(expected_refs, set(self._repo.get_refs().keys())) _TEST_REFS_SERIALIZED = ( b"42d06bd4b77fed026b154d16493e5deab78f02ec\t" b"refs/heads/40-char-ref-aaaaaaaaaaaaaaaaaa\n" b"42d06bd4b77fed026b154d16493e5deab78f02ec\trefs/heads/master\n" b"42d06bd4b77fed026b154d16493e5deab78f02ec\trefs/heads/packed\n" b"df6800012397fb85c56e7418dd4eb9405dee075c\trefs/tags/refs-0.1\n" 
b"3ec9c43c84ff242e3ef4a9fc5bc111fd780a76a8\trefs/tags/refs-0.2\n" ) class InfoRefsContainerTests(TestCase): def test_invalid_refname(self): text = _TEST_REFS_SERIALIZED + b"00" * 20 + b"\trefs/stash\n" refs = InfoRefsContainer(BytesIO(text)) expected_refs = dict(_TEST_REFS) del expected_refs[b"HEAD"] expected_refs[b"refs/stash"] = b"00" * 20 del expected_refs[b"refs/heads/loop"] self.assertEqual(expected_refs, refs.as_dict()) def test_keys(self): refs = InfoRefsContainer(BytesIO(_TEST_REFS_SERIALIZED)) actual_keys = set(refs.keys()) self.assertEqual(set(refs.allkeys()), actual_keys) expected_refs = dict(_TEST_REFS) del expected_refs[b"HEAD"] del expected_refs[b"refs/heads/loop"] self.assertEqual(set(expected_refs.keys()), actual_keys) actual_keys = refs.keys(b"refs/heads") actual_keys.discard(b"loop") self.assertEqual( [b"40-char-ref-aaaaaaaaaaaaaaaaaa", b"master", b"packed"], sorted(actual_keys), ) self.assertEqual([b"refs-0.1", b"refs-0.2"], sorted(refs.keys(b"refs/tags"))) def test_as_dict(self): refs = InfoRefsContainer(BytesIO(_TEST_REFS_SERIALIZED)) # refs/heads/loop does not show up even if it exists expected_refs = dict(_TEST_REFS) del expected_refs[b"HEAD"] del expected_refs[b"refs/heads/loop"] self.assertEqual(expected_refs, refs.as_dict()) def test_contains(self): refs = InfoRefsContainer(BytesIO(_TEST_REFS_SERIALIZED)) self.assertTrue(b"refs/heads/master" in refs) self.assertFalse(b"refs/heads/bar" in refs) def test_get_peeled(self): refs = InfoRefsContainer(BytesIO(_TEST_REFS_SERIALIZED)) # refs/heads/loop does not show up even if it exists self.assertEqual( - _TEST_REFS[b"refs/heads/master"], refs.get_peeled(b"refs/heads/master") + _TEST_REFS[b"refs/heads/master"], + refs.get_peeled(b"refs/heads/master"), ) class ParseSymrefValueTests(TestCase): def test_valid(self): self.assertEqual(b"refs/heads/foo", parse_symref_value(b"ref: refs/heads/foo")) def test_invalid(self): self.assertRaises(ValueError, parse_symref_value, b"foobar") class StripPeeledRefsTests(TestCase): all_refs = { b"refs/heads/master": b"8843d7f92416211de9ebb963ff4ce28125932878", b"refs/heads/testing": b"186a005b134d8639a58b6731c7c1ea821a6eedba", b"refs/tags/1.0.0": b"a93db4b0360cc635a2b93675010bac8d101f73f0", b"refs/tags/1.0.0^{}": b"a93db4b0360cc635a2b93675010bac8d101f73f0", b"refs/tags/2.0.0": b"0749936d0956c661ac8f8d3483774509c165f89e", b"refs/tags/2.0.0^{}": b"0749936d0956c661ac8f8d3483774509c165f89e", } non_peeled_refs = { b"refs/heads/master": b"8843d7f92416211de9ebb963ff4ce28125932878", b"refs/heads/testing": b"186a005b134d8639a58b6731c7c1ea821a6eedba", b"refs/tags/1.0.0": b"a93db4b0360cc635a2b93675010bac8d101f73f0", b"refs/tags/2.0.0": b"0749936d0956c661ac8f8d3483774509c165f89e", } def test_strip_peeled_refs(self): # Simple check of two dicts self.assertEqual(strip_peeled_refs(self.all_refs), self.non_peeled_refs) diff --git a/dulwich/tests/test_repository.py b/dulwich/tests/test_repository.py index 65394829..b476b798 100644 --- a/dulwich/tests/test_repository.py +++ b/dulwich/tests/test_repository.py @@ -1,1255 +1,1268 @@ # -*- coding: utf-8 -*- # test_repository.py -- tests for repository.py # Copyright (C) 2007 James Westby # # Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU # General Public License as public by the Free Software Foundation; version 2.0 # or (at your option) any later version. You can redistribute it and/or # modify it under the terms of either of these two licenses. 
# # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # You should have received a copy of the licenses; if not, see # for a copy of the GNU General Public License # and for a copy of the Apache # License, Version 2.0. # """Tests for the repository.""" import locale import os import stat import shutil import sys import tempfile import warnings from dulwich import errors from dulwich.object_store import ( tree_lookup_path, ) from dulwich import objects from dulwich.config import Config from dulwich.errors import NotGitRepository from dulwich.repo import ( InvalidUserIdentity, Repo, MemoryRepo, check_user_identity, UnsupportedVersion, ) from dulwich.tests import ( TestCase, skipIf, ) from dulwich.tests.utils import ( open_repo, tear_down_repo, setup_warning_catcher, ) missing_sha = b"b91fa4d900e17e99b433218e988c4eb4a3e9a097" class CreateRepositoryTests(TestCase): def assertFileContentsEqual(self, expected, repo, path): f = repo.get_named_file(path) if not f: self.assertEqual(expected, None) else: with f: self.assertEqual(expected, f.read()) def _check_repo_contents(self, repo, expect_bare): self.assertEqual(expect_bare, repo.bare) self.assertFileContentsEqual(b"Unnamed repository", repo, "description") self.assertFileContentsEqual(b"", repo, os.path.join("info", "exclude")) self.assertFileContentsEqual(None, repo, "nonexistent file") barestr = b"bare = " + str(expect_bare).lower().encode("ascii") with repo.get_named_file("config") as f: config_text = f.read() self.assertTrue(barestr in config_text, "%r" % config_text) expect_filemode = sys.platform != "win32" barestr = b"filemode = " + str(expect_filemode).lower().encode("ascii") with repo.get_named_file("config") as f: config_text = f.read() self.assertTrue(barestr in config_text, "%r" % config_text) def test_create_memory(self): repo = MemoryRepo.init_bare([], {}) self._check_repo_contents(repo, True) def test_create_disk_bare(self): tmp_dir = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, tmp_dir) repo = Repo.init_bare(tmp_dir) self.assertEqual(tmp_dir, repo._controldir) self._check_repo_contents(repo, True) def test_create_disk_non_bare(self): tmp_dir = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, tmp_dir) repo = Repo.init(tmp_dir) self.assertEqual(os.path.join(tmp_dir, ".git"), repo._controldir) self._check_repo_contents(repo, False) def test_create_disk_non_bare_mkdir(self): tmp_dir = tempfile.mkdtemp() target_dir = os.path.join(tmp_dir, "target") self.addCleanup(shutil.rmtree, tmp_dir) repo = Repo.init(target_dir, mkdir=True) self.assertEqual(os.path.join(target_dir, ".git"), repo._controldir) self._check_repo_contents(repo, False) def test_create_disk_bare_mkdir(self): tmp_dir = tempfile.mkdtemp() target_dir = os.path.join(tmp_dir, "target") self.addCleanup(shutil.rmtree, tmp_dir) repo = Repo.init_bare(target_dir, mkdir=True) self.assertEqual(target_dir, repo._controldir) self._check_repo_contents(repo, True) class MemoryRepoTests(TestCase): def test_set_description(self): r = MemoryRepo.init_bare([], {}) description = b"Some description" r.set_description(description) self.assertEqual(description, r.get_description()) class RepositoryRootTests(TestCase): def mkdtemp(self): return tempfile.mkdtemp() def open_repo(self, name): temp_dir = self.mkdtemp() repo = 
open_repo(name, temp_dir) self.addCleanup(tear_down_repo, repo) return repo def test_simple_props(self): r = self.open_repo("a.git") self.assertEqual(r.controldir(), r.path) def test_setitem(self): r = self.open_repo("a.git") r[b"refs/tags/foo"] = b"a90fa2d900a17e99b433217e988c4eb4a2e9a097" self.assertEqual( b"a90fa2d900a17e99b433217e988c4eb4a2e9a097", r[b"refs/tags/foo"].id ) def test_getitem_unicode(self): r = self.open_repo("a.git") test_keys = [ (b"refs/heads/master", True), (b"a90fa2d900a17e99b433217e988c4eb4a2e9a097", True), (b"11" * 19 + b"--", False), ] for k, contained in test_keys: self.assertEqual(k in r, contained) # Avoid deprecation warning under Py3.2+ if getattr(self, "assertRaisesRegex", None): assertRaisesRegexp = self.assertRaisesRegex else: assertRaisesRegexp = self.assertRaisesRegexp for k, _ in test_keys: assertRaisesRegexp( - TypeError, "'name' must be bytestring, not int", r.__getitem__, 12 + TypeError, + "'name' must be bytestring, not int", + r.__getitem__, + 12, ) def test_delitem(self): r = self.open_repo("a.git") del r[b"refs/heads/master"] self.assertRaises(KeyError, lambda: r[b"refs/heads/master"]) del r[b"HEAD"] self.assertRaises(KeyError, lambda: r[b"HEAD"]) self.assertRaises(ValueError, r.__delitem__, b"notrefs/foo") def test_get_refs(self): r = self.open_repo("a.git") self.assertEqual( { b"HEAD": b"a90fa2d900a17e99b433217e988c4eb4a2e9a097", b"refs/heads/master": b"a90fa2d900a17e99b433217e988c4eb4a2e9a097", b"refs/tags/mytag": b"28237f4dc30d0d462658d6b937b08a0f0b6ef55a", b"refs/tags/mytag-packed": b"b0931cadc54336e78a1d980420e3268903b57a50", }, r.get_refs(), ) def test_head(self): r = self.open_repo("a.git") self.assertEqual(r.head(), b"a90fa2d900a17e99b433217e988c4eb4a2e9a097") def test_get_object(self): r = self.open_repo("a.git") obj = r.get_object(r.head()) self.assertEqual(obj.type_name, b"commit") def test_get_object_non_existant(self): r = self.open_repo("a.git") self.assertRaises(KeyError, r.get_object, missing_sha) def test_contains_object(self): r = self.open_repo("a.git") self.assertTrue(r.head() in r) self.assertFalse(b"z" * 40 in r) def test_contains_ref(self): r = self.open_repo("a.git") self.assertTrue(b"HEAD" in r) def test_get_no_description(self): r = self.open_repo("a.git") self.assertIs(None, r.get_description()) def test_get_description(self): r = self.open_repo("a.git") with open(os.path.join(r.path, "description"), "wb") as f: f.write(b"Some description") self.assertEqual(b"Some description", r.get_description()) def test_set_description(self): r = self.open_repo("a.git") description = b"Some description" r.set_description(description) self.assertEqual(description, r.get_description()) def test_contains_missing(self): r = self.open_repo("a.git") self.assertFalse(b"bar" in r) def test_get_peeled(self): # unpacked ref r = self.open_repo("a.git") tag_sha = b"28237f4dc30d0d462658d6b937b08a0f0b6ef55a" self.assertNotEqual(r[tag_sha].sha().hexdigest(), r.head()) self.assertEqual(r.get_peeled(b"refs/tags/mytag"), r.head()) # packed ref with cached peeled value packed_tag_sha = b"b0931cadc54336e78a1d980420e3268903b57a50" parent_sha = r[r.head()].parents[0] self.assertNotEqual(r[packed_tag_sha].sha().hexdigest(), parent_sha) self.assertEqual(r.get_peeled(b"refs/tags/mytag-packed"), parent_sha) # TODO: add more corner cases to test repo def test_get_peeled_not_tag(self): r = self.open_repo("a.git") self.assertEqual(r.get_peeled(b"HEAD"), r.head()) def test_get_parents(self): r = self.open_repo("a.git") self.assertEqual( 
[b"2a72d929692c41d8554c07f6301757ba18a65d91"], r.get_parents(b"a90fa2d900a17e99b433217e988c4eb4a2e9a097"), ) r.update_shallow([b"a90fa2d900a17e99b433217e988c4eb4a2e9a097"], None) self.assertEqual([], r.get_parents(b"a90fa2d900a17e99b433217e988c4eb4a2e9a097")) def test_get_walker(self): r = self.open_repo("a.git") # include defaults to [r.head()] self.assertEqual( [e.commit.id for e in r.get_walker()], [r.head(), b"2a72d929692c41d8554c07f6301757ba18a65d91"], ) self.assertEqual( [ e.commit.id for e in r.get_walker([b"2a72d929692c41d8554c07f6301757ba18a65d91"]) ], [b"2a72d929692c41d8554c07f6301757ba18a65d91"], ) self.assertEqual( [ e.commit.id for e in r.get_walker(b"2a72d929692c41d8554c07f6301757ba18a65d91") ], [b"2a72d929692c41d8554c07f6301757ba18a65d91"], ) def assertFilesystemHidden(self, path): if sys.platform != "win32": return import ctypes from ctypes.wintypes import DWORD, LPCWSTR GetFileAttributesW = ctypes.WINFUNCTYPE(DWORD, LPCWSTR)( ("GetFileAttributesW", ctypes.windll.kernel32) ) self.assertTrue(2 & GetFileAttributesW(path)) def test_init_existing(self): tmp_dir = self.mkdtemp() self.addCleanup(shutil.rmtree, tmp_dir) t = Repo.init(tmp_dir) self.addCleanup(t.close) self.assertEqual(os.listdir(tmp_dir), [".git"]) self.assertFilesystemHidden(os.path.join(tmp_dir, ".git")) def test_init_mkdir(self): tmp_dir = self.mkdtemp() self.addCleanup(shutil.rmtree, tmp_dir) repo_dir = os.path.join(tmp_dir, "a-repo") t = Repo.init(repo_dir, mkdir=True) self.addCleanup(t.close) self.assertEqual(os.listdir(repo_dir), [".git"]) self.assertFilesystemHidden(os.path.join(repo_dir, ".git")) def test_init_mkdir_unicode(self): repo_name = u"\xa7" try: os.fsencode(repo_name) except UnicodeEncodeError: self.skipTest("filesystem lacks unicode support") tmp_dir = self.mkdtemp() self.addCleanup(shutil.rmtree, tmp_dir) repo_dir = os.path.join(tmp_dir, repo_name) t = Repo.init(repo_dir, mkdir=True) self.addCleanup(t.close) self.assertEqual(os.listdir(repo_dir), [".git"]) self.assertFilesystemHidden(os.path.join(repo_dir, ".git")) @skipIf(sys.platform == "win32", "fails on Windows") def test_fetch(self): r = self.open_repo("a.git") tmp_dir = self.mkdtemp() self.addCleanup(shutil.rmtree, tmp_dir) t = Repo.init(tmp_dir) self.addCleanup(t.close) r.fetch(t) self.assertIn(b"a90fa2d900a17e99b433217e988c4eb4a2e9a097", t) self.assertIn(b"a90fa2d900a17e99b433217e988c4eb4a2e9a097", t) self.assertIn(b"a90fa2d900a17e99b433217e988c4eb4a2e9a097", t) self.assertIn(b"28237f4dc30d0d462658d6b937b08a0f0b6ef55a", t) self.assertIn(b"b0931cadc54336e78a1d980420e3268903b57a50", t) @skipIf(sys.platform == "win32", "fails on Windows") def test_fetch_ignores_missing_refs(self): r = self.open_repo("a.git") missing = b"1234566789123456789123567891234657373833" r.refs[b"refs/heads/blah"] = missing tmp_dir = self.mkdtemp() self.addCleanup(shutil.rmtree, tmp_dir) t = Repo.init(tmp_dir) self.addCleanup(t.close) r.fetch(t) self.assertIn(b"a90fa2d900a17e99b433217e988c4eb4a2e9a097", t) self.assertIn(b"a90fa2d900a17e99b433217e988c4eb4a2e9a097", t) self.assertIn(b"a90fa2d900a17e99b433217e988c4eb4a2e9a097", t) self.assertIn(b"28237f4dc30d0d462658d6b937b08a0f0b6ef55a", t) self.assertIn(b"b0931cadc54336e78a1d980420e3268903b57a50", t) self.assertNotIn(missing, t) def test_clone(self): r = self.open_repo("a.git") tmp_dir = self.mkdtemp() self.addCleanup(shutil.rmtree, tmp_dir) with r.clone(tmp_dir, mkdir=False) as t: self.assertEqual( { b"HEAD": b"a90fa2d900a17e99b433217e988c4eb4a2e9a097", b"refs/remotes/origin/master": 
b"a90fa2d900a17e99b433217e988c4eb4a2e9a097", b"refs/heads/master": b"a90fa2d900a17e99b433217e988c4eb4a2e9a097", b"refs/tags/mytag": b"28237f4dc30d0d462658d6b937b08a0f0b6ef55a", b"refs/tags/mytag-packed": b"b0931cadc54336e78a1d980420e3268903b57a50", }, t.refs.as_dict(), ) shas = [e.commit.id for e in r.get_walker()] self.assertEqual( shas, [t.head(), b"2a72d929692c41d8554c07f6301757ba18a65d91"] ) c = t.get_config() encoded_path = r.path if not isinstance(encoded_path, bytes): encoded_path = os.fsencode(encoded_path) self.assertEqual(encoded_path, c.get((b"remote", b"origin"), b"url")) self.assertEqual( b"+refs/heads/*:refs/remotes/origin/*", c.get((b"remote", b"origin"), b"fetch"), ) def test_clone_no_head(self): temp_dir = self.mkdtemp() self.addCleanup(shutil.rmtree, temp_dir) repo_dir = os.path.join(os.path.dirname(__file__), "data", "repos") dest_dir = os.path.join(temp_dir, "a.git") shutil.copytree(os.path.join(repo_dir, "a.git"), dest_dir, symlinks=True) r = Repo(dest_dir) del r.refs[b"refs/heads/master"] del r.refs[b"HEAD"] t = r.clone(os.path.join(temp_dir, "b.git"), mkdir=True) self.assertEqual( { b"refs/tags/mytag": b"28237f4dc30d0d462658d6b937b08a0f0b6ef55a", b"refs/tags/mytag-packed": b"b0931cadc54336e78a1d980420e3268903b57a50", }, t.refs.as_dict(), ) def test_clone_empty(self): """Test clone() doesn't crash if HEAD points to a non-existing ref. This simulates cloning server-side bare repository either when it is still empty or if user renames master branch and pushes private repo to the server. Non-bare repo HEAD always points to an existing ref. """ r = self.open_repo("empty.git") tmp_dir = self.mkdtemp() self.addCleanup(shutil.rmtree, tmp_dir) r.clone(tmp_dir, mkdir=False, bare=True) def test_clone_bare(self): r = self.open_repo("a.git") tmp_dir = self.mkdtemp() self.addCleanup(shutil.rmtree, tmp_dir) t = r.clone(tmp_dir, mkdir=False) t.close() def test_clone_checkout_and_bare(self): r = self.open_repo("a.git") tmp_dir = self.mkdtemp() self.addCleanup(shutil.rmtree, tmp_dir) self.assertRaises( ValueError, r.clone, tmp_dir, mkdir=False, checkout=True, bare=True ) def test_merge_history(self): r = self.open_repo("simple_merge.git") shas = [e.commit.id for e in r.get_walker()] self.assertEqual( shas, [ b"5dac377bdded4c9aeb8dff595f0faeebcc8498cc", b"ab64bbdcc51b170d21588e5c5d391ee5c0c96dfd", b"4cffe90e0a41ad3f5190079d7c8f036bde29cbe6", b"60dacdc733de308bb77bb76ce0fb0f9b44c9769e", b"0d89f20333fbb1d2f3a94da77f4981373d8f4310", ], ) def test_out_of_order_merge(self): """Test that revision history is ordered by date, not parent order.""" r = self.open_repo("ooo_merge.git") shas = [e.commit.id for e in r.get_walker()] self.assertEqual( shas, [ b"7601d7f6231db6a57f7bbb79ee52e4d462fd44d1", b"f507291b64138b875c28e03469025b1ea20bc614", b"fb5b0425c7ce46959bec94d54b9a157645e114f5", b"f9e39b120c68182a4ba35349f832d0e4e61f485c", ], ) def test_get_tags_empty(self): r = self.open_repo("ooo_merge.git") self.assertEqual({}, r.refs.as_dict(b"refs/tags")) def test_get_config(self): r = self.open_repo("ooo_merge.git") self.assertIsInstance(r.get_config(), Config) def test_get_config_stack(self): r = self.open_repo("ooo_merge.git") self.assertIsInstance(r.get_config_stack(), Config) def test_common_revisions(self): """ This test demonstrates that ``find_common_revisions()`` actually returns common heads, not revisions; dulwich already uses ``find_common_revisions()`` in such a manner (see ``Repo.fetch_objects()``). 
""" expected_shas = set([b"60dacdc733de308bb77bb76ce0fb0f9b44c9769e"]) # Source for objects. r_base = self.open_repo("simple_merge.git") # Re-create each-side of the merge in simple_merge.git. # # Since the trees and blobs are missing, the repository created is # corrupted, but we're only checking for commits for the purpose of # this test, so it's immaterial. r1_dir = self.mkdtemp() self.addCleanup(shutil.rmtree, r1_dir) r1_commits = [ b"ab64bbdcc51b170d21588e5c5d391ee5c0c96dfd", # HEAD b"60dacdc733de308bb77bb76ce0fb0f9b44c9769e", b"0d89f20333fbb1d2f3a94da77f4981373d8f4310", ] r2_dir = self.mkdtemp() self.addCleanup(shutil.rmtree, r2_dir) r2_commits = [ b"4cffe90e0a41ad3f5190079d7c8f036bde29cbe6", # HEAD b"60dacdc733de308bb77bb76ce0fb0f9b44c9769e", b"0d89f20333fbb1d2f3a94da77f4981373d8f4310", ] r1 = Repo.init_bare(r1_dir) for c in r1_commits: r1.object_store.add_object(r_base.get_object(c)) r1.refs[b"HEAD"] = r1_commits[0] r2 = Repo.init_bare(r2_dir) for c in r2_commits: r2.object_store.add_object(r_base.get_object(c)) r2.refs[b"HEAD"] = r2_commits[0] # Finally, the 'real' testing! shas = r2.object_store.find_common_revisions(r1.get_graph_walker()) self.assertEqual(set(shas), expected_shas) shas = r1.object_store.find_common_revisions(r2.get_graph_walker()) self.assertEqual(set(shas), expected_shas) def test_shell_hook_pre_commit(self): if os.name != "posix": self.skipTest("shell hook tests requires POSIX shell") pre_commit_fail = """#!/bin/sh exit 1 """ pre_commit_success = """#!/bin/sh exit 0 """ repo_dir = os.path.join(self.mkdtemp()) self.addCleanup(shutil.rmtree, repo_dir) r = Repo.init(repo_dir) self.addCleanup(r.close) pre_commit = os.path.join(r.controldir(), "hooks", "pre-commit") with open(pre_commit, "w") as f: f.write(pre_commit_fail) os.chmod(pre_commit, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC) self.assertRaises( errors.CommitError, r.do_commit, "failed commit", committer="Test Committer ", author="Test Author ", commit_timestamp=12345, commit_timezone=0, author_timestamp=12345, author_timezone=0, ) with open(pre_commit, "w") as f: f.write(pre_commit_success) os.chmod(pre_commit, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC) commit_sha = r.do_commit( b"empty commit", committer=b"Test Committer ", author=b"Test Author ", commit_timestamp=12395, commit_timezone=0, author_timestamp=12395, author_timezone=0, ) self.assertEqual([], r[commit_sha].parents) def test_shell_hook_commit_msg(self): if os.name != "posix": self.skipTest("shell hook tests requires POSIX shell") commit_msg_fail = """#!/bin/sh exit 1 """ commit_msg_success = """#!/bin/sh exit 0 """ repo_dir = self.mkdtemp() self.addCleanup(shutil.rmtree, repo_dir) r = Repo.init(repo_dir) self.addCleanup(r.close) commit_msg = os.path.join(r.controldir(), "hooks", "commit-msg") with open(commit_msg, "w") as f: f.write(commit_msg_fail) os.chmod(commit_msg, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC) self.assertRaises( errors.CommitError, r.do_commit, b"failed commit", committer=b"Test Committer ", author=b"Test Author ", commit_timestamp=12345, commit_timezone=0, author_timestamp=12345, author_timezone=0, ) with open(commit_msg, "w") as f: f.write(commit_msg_success) os.chmod(commit_msg, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC) commit_sha = r.do_commit( b"empty commit", committer=b"Test Committer ", author=b"Test Author ", commit_timestamp=12395, commit_timezone=0, author_timestamp=12395, author_timezone=0, ) self.assertEqual([], r[commit_sha].parents) def test_shell_hook_post_commit(self): if os.name != "posix": 
self.skipTest("shell hook tests requires POSIX shell") repo_dir = self.mkdtemp() self.addCleanup(shutil.rmtree, repo_dir) r = Repo.init(repo_dir) self.addCleanup(r.close) (fd, path) = tempfile.mkstemp(dir=repo_dir) os.close(fd) post_commit_msg = ( """#!/bin/sh rm """ + path + """ """ ) root_sha = r.do_commit( b"empty commit", committer=b"Test Committer ", author=b"Test Author ", commit_timestamp=12345, commit_timezone=0, author_timestamp=12345, author_timezone=0, ) self.assertEqual([], r[root_sha].parents) post_commit = os.path.join(r.controldir(), "hooks", "post-commit") with open(post_commit, "wb") as f: f.write(post_commit_msg.encode(locale.getpreferredencoding())) os.chmod(post_commit, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC) commit_sha = r.do_commit( b"empty commit", committer=b"Test Committer ", author=b"Test Author ", commit_timestamp=12345, commit_timezone=0, author_timestamp=12345, author_timezone=0, ) self.assertEqual([root_sha], r[commit_sha].parents) self.assertFalse(os.path.exists(path)) post_commit_msg_fail = """#!/bin/sh exit 1 """ with open(post_commit, "w") as f: f.write(post_commit_msg_fail) os.chmod(post_commit, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC) warnings.simplefilter("always", UserWarning) self.addCleanup(warnings.resetwarnings) warnings_list, restore_warnings = setup_warning_catcher() self.addCleanup(restore_warnings) commit_sha2 = r.do_commit( b"empty commit", committer=b"Test Committer ", author=b"Test Author ", commit_timestamp=12345, commit_timezone=0, author_timestamp=12345, author_timezone=0, ) expected_warning = UserWarning( "post-commit hook failed: Hook post-commit exited with " "non-zero status 1", ) for w in warnings_list: if type(w) == type(expected_warning) and w.args == expected_warning.args: break else: raise AssertionError( "Expected warning %r not in %r" % (expected_warning, warnings_list) ) self.assertEqual([commit_sha], r[commit_sha2].parents) def test_as_dict(self): def check(repo): self.assertEqual( - repo.refs.subkeys(b"refs/tags"), repo.refs.subkeys(b"refs/tags/") + repo.refs.subkeys(b"refs/tags"), + repo.refs.subkeys(b"refs/tags/"), ) self.assertEqual( - repo.refs.as_dict(b"refs/tags"), repo.refs.as_dict(b"refs/tags/") + repo.refs.as_dict(b"refs/tags"), + repo.refs.as_dict(b"refs/tags/"), ) self.assertEqual( - repo.refs.as_dict(b"refs/heads"), repo.refs.as_dict(b"refs/heads/") + repo.refs.as_dict(b"refs/heads"), + repo.refs.as_dict(b"refs/heads/"), ) bare = self.open_repo("a.git") tmp_dir = self.mkdtemp() self.addCleanup(shutil.rmtree, tmp_dir) with bare.clone(tmp_dir, mkdir=False) as nonbare: check(nonbare) check(bare) def test_working_tree(self): temp_dir = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, temp_dir) worktree_temp_dir = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, worktree_temp_dir) r = Repo.init(temp_dir) self.addCleanup(r.close) root_sha = r.do_commit( b"empty commit", committer=b"Test Committer ", author=b"Test Author ", commit_timestamp=12345, commit_timezone=0, author_timestamp=12345, author_timezone=0, ) r.refs[b"refs/heads/master"] = root_sha w = Repo._init_new_working_directory(worktree_temp_dir, r) self.addCleanup(w.close) new_sha = w.do_commit( b"new commit", committer=b"Test Committer ", author=b"Test Author ", commit_timestamp=12345, commit_timezone=0, author_timestamp=12345, author_timezone=0, ) w.refs[b"HEAD"] = new_sha self.assertEqual( os.path.abspath(r.controldir()), os.path.abspath(w.commondir()) ) self.assertEqual(r.refs.keys(), w.refs.keys()) self.assertNotEqual(r.head(), w.head()) class 
BuildRepoRootTests(TestCase): """Tests that build on-disk repos from scratch. Repos live in a temp dir and are torn down after each test. They start with a single commit in master having single file named 'a'. """ def get_repo_dir(self): return os.path.join(tempfile.mkdtemp(), "test") def setUp(self): super(BuildRepoRootTests, self).setUp() self._repo_dir = self.get_repo_dir() os.makedirs(self._repo_dir) r = self._repo = Repo.init(self._repo_dir) self.addCleanup(tear_down_repo, r) self.assertFalse(r.bare) self.assertEqual(b"ref: refs/heads/master", r.refs.read_ref(b"HEAD")) self.assertRaises(KeyError, lambda: r.refs[b"refs/heads/master"]) with open(os.path.join(r.path, "a"), "wb") as f: f.write(b"file contents") r.stage(["a"]) commit_sha = r.do_commit( b"msg", committer=b"Test Committer ", author=b"Test Author ", commit_timestamp=12345, commit_timezone=0, author_timestamp=12345, author_timezone=0, ) self.assertEqual([], r[commit_sha].parents) self._root_commit = commit_sha def test_get_shallow(self): self.assertEqual(set(), self._repo.get_shallow()) with open(os.path.join(self._repo.path, ".git", "shallow"), "wb") as f: f.write(b"a90fa2d900a17e99b433217e988c4eb4a2e9a097\n") self.assertEqual( - {b"a90fa2d900a17e99b433217e988c4eb4a2e9a097"}, self._repo.get_shallow() + {b"a90fa2d900a17e99b433217e988c4eb4a2e9a097"}, + self._repo.get_shallow(), ) def test_update_shallow(self): self._repo.update_shallow(None, None) # no op self.assertEqual(set(), self._repo.get_shallow()) self._repo.update_shallow([b"a90fa2d900a17e99b433217e988c4eb4a2e9a097"], None) self.assertEqual( - {b"a90fa2d900a17e99b433217e988c4eb4a2e9a097"}, self._repo.get_shallow() + {b"a90fa2d900a17e99b433217e988c4eb4a2e9a097"}, + self._repo.get_shallow(), ) self._repo.update_shallow( [b"a90fa2d900a17e99b433217e988c4eb4a2e9a097"], [b"f9e39b120c68182a4ba35349f832d0e4e61f485c"], ) self.assertEqual( - {b"a90fa2d900a17e99b433217e988c4eb4a2e9a097"}, self._repo.get_shallow() + {b"a90fa2d900a17e99b433217e988c4eb4a2e9a097"}, + self._repo.get_shallow(), ) def test_build_repo(self): r = self._repo self.assertEqual(b"ref: refs/heads/master", r.refs.read_ref(b"HEAD")) self.assertEqual(self._root_commit, r.refs[b"refs/heads/master"]) expected_blob = objects.Blob.from_string(b"file contents") self.assertEqual(expected_blob.data, r[expected_blob.id].data) actual_commit = r[self._root_commit] self.assertEqual(b"msg", actual_commit.message) def test_commit_modified(self): r = self._repo with open(os.path.join(r.path, "a"), "wb") as f: f.write(b"new contents") r.stage(["a"]) commit_sha = r.do_commit( b"modified a", committer=b"Test Committer ", author=b"Test Author ", commit_timestamp=12395, commit_timezone=0, author_timestamp=12395, author_timezone=0, ) self.assertEqual([self._root_commit], r[commit_sha].parents) a_mode, a_id = tree_lookup_path(r.get_object, r[commit_sha].tree, b"a") self.assertEqual(stat.S_IFREG | 0o644, a_mode) self.assertEqual(b"new contents", r[a_id].data) @skipIf(not getattr(os, "symlink", None), "Requires symlink support") def test_commit_symlink(self): r = self._repo os.symlink("a", os.path.join(r.path, "b")) r.stage(["a", "b"]) commit_sha = r.do_commit( b"Symlink b", committer=b"Test Committer ", author=b"Test Author ", commit_timestamp=12395, commit_timezone=0, author_timestamp=12395, author_timezone=0, ) self.assertEqual([self._root_commit], r[commit_sha].parents) b_mode, b_id = tree_lookup_path(r.get_object, r[commit_sha].tree, b"b") self.assertTrue(stat.S_ISLNK(b_mode)) self.assertEqual(b"a", r[b_id].data) def 
test_commit_merge_heads_file(self): tmp_dir = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, tmp_dir) r = Repo.init(tmp_dir) with open(os.path.join(r.path, "a"), "w") as f: f.write("initial text") c1 = r.do_commit( b"initial commit", committer=b"Test Committer ", author=b"Test Author ", commit_timestamp=12395, commit_timezone=0, author_timestamp=12395, author_timezone=0, ) with open(os.path.join(r.path, "a"), "w") as f: f.write("merged text") with open(os.path.join(r.path, ".git", "MERGE_HEADS"), "w") as f: f.write("c27a2d21dd136312d7fa9e8baabb82561a1727d0\n") r.stage(["a"]) commit_sha = r.do_commit( b"deleted a", committer=b"Test Committer ", author=b"Test Author ", commit_timestamp=12395, commit_timezone=0, author_timestamp=12395, author_timezone=0, ) self.assertEqual( - [c1, b"c27a2d21dd136312d7fa9e8baabb82561a1727d0"], r[commit_sha].parents + [c1, b"c27a2d21dd136312d7fa9e8baabb82561a1727d0"], + r[commit_sha].parents, ) def test_commit_deleted(self): r = self._repo os.remove(os.path.join(r.path, "a")) r.stage(["a"]) commit_sha = r.do_commit( b"deleted a", committer=b"Test Committer ", author=b"Test Author ", commit_timestamp=12395, commit_timezone=0, author_timestamp=12395, author_timezone=0, ) self.assertEqual([self._root_commit], r[commit_sha].parents) self.assertEqual([], list(r.open_index())) tree = r[r[commit_sha].tree] self.assertEqual([], list(tree.iteritems())) def test_commit_follows(self): r = self._repo r.refs.set_symbolic_ref(b"HEAD", b"refs/heads/bla") commit_sha = r.do_commit( b"commit with strange character", committer=b"Test Committer ", author=b"Test Author ", commit_timestamp=12395, commit_timezone=0, author_timestamp=12395, author_timezone=0, ref=b"HEAD", ) self.assertEqual(commit_sha, r[b"refs/heads/bla"].id) def test_commit_encoding(self): r = self._repo commit_sha = r.do_commit( b"commit with strange character \xee", committer=b"Test Committer ", author=b"Test Author ", commit_timestamp=12395, commit_timezone=0, author_timestamp=12395, author_timezone=0, encoding=b"iso8859-1", ) self.assertEqual(b"iso8859-1", r[commit_sha].encoding) def test_compression_level(self): r = self._repo c = r.get_config() c.set(("core",), "compression", "3") c.set(("core",), "looseCompression", "4") c.write_to_path() r = Repo(self._repo_dir) self.assertEqual(r.object_store.loose_compression_level, 4) def test_repositoryformatversion(self): r = self._repo c = r.get_config() c.set(("core",), "repositoryformatversion", "2") c.write_to_path() self.assertRaises(UnsupportedVersion, Repo, self._repo_dir) def test_commit_encoding_from_config(self): r = self._repo c = r.get_config() c.set(("i18n",), "commitEncoding", "iso8859-1") c.write_to_path() commit_sha = r.do_commit( b"commit with strange character \xee", committer=b"Test Committer ", author=b"Test Author ", commit_timestamp=12395, commit_timezone=0, author_timestamp=12395, author_timezone=0, ) self.assertEqual(b"iso8859-1", r[commit_sha].encoding) def test_commit_config_identity(self): # commit falls back to the users' identity if it wasn't specified r = self._repo c = r.get_config() c.set((b"user",), b"name", b"Jelmer") c.set((b"user",), b"email", b"jelmer@apache.org") c.write_to_path() commit_sha = r.do_commit(b"message") self.assertEqual(b"Jelmer ", r[commit_sha].author) self.assertEqual(b"Jelmer ", r[commit_sha].committer) def test_commit_config_identity_strips_than(self): # commit falls back to the users' identity if it wasn't specified, # and strips superfluous <> r = self._repo c = r.get_config() c.set((b"user",), b"name", 
b"Jelmer") c.set((b"user",), b"email", b"") c.write_to_path() commit_sha = r.do_commit(b"message") self.assertEqual(b"Jelmer ", r[commit_sha].author) self.assertEqual(b"Jelmer ", r[commit_sha].committer) def test_commit_config_identity_in_memoryrepo(self): # commit falls back to the users' identity if it wasn't specified r = MemoryRepo.init_bare([], {}) c = r.get_config() c.set((b"user",), b"name", b"Jelmer") c.set((b"user",), b"email", b"jelmer@apache.org") commit_sha = r.do_commit(b"message", tree=objects.Tree().id) self.assertEqual(b"Jelmer ", r[commit_sha].author) self.assertEqual(b"Jelmer ", r[commit_sha].committer) def overrideEnv(self, name, value): def restore(): if oldval is not None: os.environ[name] = oldval else: del os.environ[name] oldval = os.environ.get(name) os.environ[name] = value self.addCleanup(restore) def test_commit_config_identity_from_env(self): # commit falls back to the users' identity if it wasn't specified self.overrideEnv("GIT_COMMITTER_NAME", "joe") self.overrideEnv("GIT_COMMITTER_EMAIL", "joe@example.com") r = self._repo c = r.get_config() c.set((b"user",), b"name", b"Jelmer") c.set((b"user",), b"email", b"jelmer@apache.org") c.write_to_path() commit_sha = r.do_commit(b"message") self.assertEqual(b"Jelmer ", r[commit_sha].author) self.assertEqual(b"joe ", r[commit_sha].committer) def test_commit_fail_ref(self): r = self._repo def set_if_equals(name, old_ref, new_ref, **kwargs): return False r.refs.set_if_equals = set_if_equals def add_if_new(name, new_ref, **kwargs): self.fail("Unexpected call to add_if_new") r.refs.add_if_new = add_if_new old_shas = set(r.object_store) self.assertRaises( errors.CommitError, r.do_commit, b"failed commit", committer=b"Test Committer ", author=b"Test Author ", commit_timestamp=12345, commit_timezone=0, author_timestamp=12345, author_timezone=0, ) new_shas = set(r.object_store) - old_shas self.assertEqual(1, len(new_shas)) # Check that the new commit (now garbage) was added. 
new_commit = r[new_shas.pop()] self.assertEqual(r[self._root_commit].tree, new_commit.tree) self.assertEqual(b"failed commit", new_commit.message) def test_commit_branch(self): r = self._repo commit_sha = r.do_commit( b"commit to branch", committer=b"Test Committer ", author=b"Test Author ", commit_timestamp=12395, commit_timezone=0, author_timestamp=12395, author_timezone=0, ref=b"refs/heads/new_branch", ) self.assertEqual(self._root_commit, r[b"HEAD"].id) self.assertEqual(commit_sha, r[b"refs/heads/new_branch"].id) self.assertEqual([], r[commit_sha].parents) self.assertTrue(b"refs/heads/new_branch" in r) new_branch_head = commit_sha commit_sha = r.do_commit( b"commit to branch 2", committer=b"Test Committer ", author=b"Test Author ", commit_timestamp=12395, commit_timezone=0, author_timestamp=12395, author_timezone=0, ref=b"refs/heads/new_branch", ) self.assertEqual(self._root_commit, r[b"HEAD"].id) self.assertEqual(commit_sha, r[b"refs/heads/new_branch"].id) self.assertEqual([new_branch_head], r[commit_sha].parents) def test_commit_merge_heads(self): r = self._repo merge_1 = r.do_commit( b"commit to branch 2", committer=b"Test Committer ", author=b"Test Author ", commit_timestamp=12395, commit_timezone=0, author_timestamp=12395, author_timezone=0, ref=b"refs/heads/new_branch", ) commit_sha = r.do_commit( b"commit with merge", committer=b"Test Committer ", author=b"Test Author ", commit_timestamp=12395, commit_timezone=0, author_timestamp=12395, author_timezone=0, merge_heads=[merge_1], ) self.assertEqual([self._root_commit, merge_1], r[commit_sha].parents) def test_commit_dangling_commit(self): r = self._repo old_shas = set(r.object_store) old_refs = r.get_refs() commit_sha = r.do_commit( b"commit with no ref", committer=b"Test Committer ", author=b"Test Author ", commit_timestamp=12395, commit_timezone=0, author_timestamp=12395, author_timezone=0, ref=None, ) new_shas = set(r.object_store) - old_shas # New sha is added, but no new refs self.assertEqual(1, len(new_shas)) new_commit = r[new_shas.pop()] self.assertEqual(r[self._root_commit].tree, new_commit.tree) self.assertEqual([], r[commit_sha].parents) self.assertEqual(old_refs, r.get_refs()) def test_commit_dangling_commit_with_parents(self): r = self._repo old_shas = set(r.object_store) old_refs = r.get_refs() commit_sha = r.do_commit( b"commit with no ref", committer=b"Test Committer ", author=b"Test Author ", commit_timestamp=12395, commit_timezone=0, author_timestamp=12395, author_timezone=0, ref=None, merge_heads=[self._root_commit], ) new_shas = set(r.object_store) - old_shas # New sha is added, but no new refs self.assertEqual(1, len(new_shas)) new_commit = r[new_shas.pop()] self.assertEqual(r[self._root_commit].tree, new_commit.tree) self.assertEqual([self._root_commit], r[commit_sha].parents) self.assertEqual(old_refs, r.get_refs()) def test_stage_absolute(self): r = self._repo os.remove(os.path.join(r.path, "a")) self.assertRaises(ValueError, r.stage, [os.path.join(r.path, "a")]) def test_stage_deleted(self): r = self._repo os.remove(os.path.join(r.path, "a")) r.stage(["a"]) r.stage(["a"]) # double-stage a deleted path def test_stage_directory(self): r = self._repo os.mkdir(os.path.join(r.path, "c")) r.stage(["c"]) self.assertEqual([b"a"], list(r.open_index())) - @skipIf(sys.platform in ("win32", "darwin"), "tries to implicitly decode as utf8") + @skipIf( + sys.platform in ("win32", "darwin"), + "tries to implicitly decode as utf8", + ) def test_commit_no_encode_decode(self): r = self._repo repo_path_bytes = 
os.fsencode(r.path) encodings = ("utf8", "latin1") names = [u"À".encode(encoding) for encoding in encodings] for name, encoding in zip(names, encodings): full_path = os.path.join(repo_path_bytes, name) with open(full_path, "wb") as f: f.write(encoding.encode("ascii")) # These files are break tear_down_repo, so cleanup these files # ourselves. self.addCleanup(os.remove, full_path) r.stage(names) commit_sha = r.do_commit( b"Files with different encodings", committer=b"Test Committer ", author=b"Test Author ", commit_timestamp=12395, commit_timezone=0, author_timestamp=12395, author_timezone=0, ref=None, merge_heads=[self._root_commit], ) for name, encoding in zip(names, encodings): mode, id = tree_lookup_path(r.get_object, r[commit_sha].tree, name) self.assertEqual(stat.S_IFREG | 0o644, mode) self.assertEqual(encoding.encode("ascii"), r[id].data) def test_discover_intended(self): path = os.path.join(self._repo_dir, "b/c") r = Repo.discover(path) self.assertEqual(r.head(), self._repo.head()) def test_discover_isrepo(self): r = Repo.discover(self._repo_dir) self.assertEqual(r.head(), self._repo.head()) def test_discover_notrepo(self): with self.assertRaises(NotGitRepository): Repo.discover("/") class CheckUserIdentityTests(TestCase): def test_valid(self): check_user_identity(b"Me ") def test_invalid(self): self.assertRaises(InvalidUserIdentity, check_user_identity, b"No Email") self.assertRaises( InvalidUserIdentity, check_user_identity, b"Fullname " ) self.assertRaises( InvalidUserIdentity, check_user_identity, b"Fullname >order<>" ) diff --git a/dulwich/tests/test_server.py b/dulwich/tests/test_server.py index 61eaf902..42ff04d1 100644 --- a/dulwich/tests/test_server.py +++ b/dulwich/tests/test_server.py @@ -1,1167 +1,1185 @@ # test_server.py -- Tests for the git server # Copyright (C) 2010 Google, Inc. # # Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU # General Public License as public by the Free Software Foundation; version 2.0 # or (at your option) any later version. You can redistribute it and/or # modify it under the terms of either of these two licenses. # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # You should have received a copy of the licenses; if not, see # for a copy of the GNU General Public License # and for a copy of the Apache # License, Version 2.0. 
# """Tests for the smart protocol server.""" from io import BytesIO import os import shutil import tempfile import sys from dulwich.errors import ( GitProtocolError, NotGitRepository, UnexpectedCommandError, HangupException, ) from dulwich.objects import Tree from dulwich.object_store import ( MemoryObjectStore, ) from dulwich.repo import ( MemoryRepo, Repo, ) from dulwich.server import ( Backend, DictBackend, FileSystemBackend, MultiAckGraphWalkerImpl, MultiAckDetailedGraphWalkerImpl, PackHandler, _split_proto_line, serve_command, _find_shallow, _ProtocolGraphWalker, ReceivePackHandler, SingleAckGraphWalkerImpl, UploadPackHandler, update_server_info, ) from dulwich.tests import TestCase from dulwich.tests.utils import ( make_commit, make_tag, ) from dulwich.protocol import ( ZERO_SHA, ) ONE = b"1" * 40 TWO = b"2" * 40 THREE = b"3" * 40 FOUR = b"4" * 40 FIVE = b"5" * 40 SIX = b"6" * 40 class TestProto(object): def __init__(self): self._output = [] self._received = {0: [], 1: [], 2: [], 3: []} def set_output(self, output_lines): self._output = output_lines def read_pkt_line(self): if self._output: data = self._output.pop(0) if data is not None: return data.rstrip() + b"\n" else: # flush-pkt ('0000'). return None else: raise HangupException() def write_sideband(self, band, data): self._received[band].append(data) def write_pkt_line(self, data): self._received[0].append(data) def get_received_line(self, band=0): lines = self._received[band] return lines.pop(0) class TestGenericPackHandler(PackHandler): def __init__(self): PackHandler.__init__(self, Backend(), None) @classmethod def capabilities(cls): return [b"cap1", b"cap2", b"cap3"] @classmethod def required_capabilities(cls): return [b"cap2"] class HandlerTestCase(TestCase): def setUp(self): super(HandlerTestCase, self).setUp() self._handler = TestGenericPackHandler() def assertSucceeds(self, func, *args, **kwargs): try: func(*args, **kwargs) except GitProtocolError as e: self.fail(e) def test_capability_line(self): self.assertEqual( b" cap1 cap2 cap3", self._handler.capability_line([b"cap1", b"cap2", b"cap3"]), ) def test_set_client_capabilities(self): set_caps = self._handler.set_client_capabilities self.assertSucceeds(set_caps, [b"cap2"]) self.assertSucceeds(set_caps, [b"cap1", b"cap2"]) # different order self.assertSucceeds(set_caps, [b"cap3", b"cap1", b"cap2"]) # error cases self.assertRaises(GitProtocolError, set_caps, [b"capxxx", b"cap2"]) self.assertRaises(GitProtocolError, set_caps, [b"cap1", b"cap3"]) # ignore innocuous but unknown capabilities self.assertRaises(GitProtocolError, set_caps, [b"cap2", b"ignoreme"]) self.assertFalse(b"ignoreme" in self._handler.capabilities()) self._handler.innocuous_capabilities = lambda: (b"ignoreme",) self.assertSucceeds(set_caps, [b"cap2", b"ignoreme"]) def test_has_capability(self): self.assertRaises(GitProtocolError, self._handler.has_capability, b"cap") caps = self._handler.capabilities() self._handler.set_client_capabilities(caps) for cap in caps: self.assertTrue(self._handler.has_capability(cap)) self.assertFalse(self._handler.has_capability(b"capxxx")) class UploadPackHandlerTestCase(TestCase): def setUp(self): super(UploadPackHandlerTestCase, self).setUp() self._repo = MemoryRepo.init_bare([], {}) backend = DictBackend({b"/": self._repo}) self._handler = UploadPackHandler( backend, [b"/", b"host=lolcathost"], TestProto() ) def test_progress(self): caps = self._handler.required_capabilities() self._handler.set_client_capabilities(caps) self._handler.progress(b"first message") 
self._handler.progress(b"second message") self.assertEqual(b"first message", self._handler.proto.get_received_line(2)) self.assertEqual(b"second message", self._handler.proto.get_received_line(2)) self.assertRaises(IndexError, self._handler.proto.get_received_line, 2) def test_no_progress(self): caps = list(self._handler.required_capabilities()) + [b"no-progress"] self._handler.set_client_capabilities(caps) self._handler.progress(b"first message") self._handler.progress(b"second message") self.assertRaises(IndexError, self._handler.proto.get_received_line, 2) def test_get_tagged(self): refs = { b"refs/tags/tag1": ONE, b"refs/tags/tag2": TWO, b"refs/heads/master": FOUR, # not a tag, no peeled value } # repo needs to peel this object self._repo.object_store.add_object(make_commit(id=FOUR)) self._repo.refs._update(refs) peeled = { b"refs/tags/tag1": b"1234" * 10, b"refs/tags/tag2": b"5678" * 10, } self._repo.refs._update_peeled(peeled) caps = list(self._handler.required_capabilities()) + [b"include-tag"] self._handler.set_client_capabilities(caps) self.assertEqual( {b"1234" * 10: ONE, b"5678" * 10: TWO}, self._handler.get_tagged(refs, repo=self._repo), ) # non-include-tag case caps = self._handler.required_capabilities() self._handler.set_client_capabilities(caps) self.assertEqual({}, self._handler.get_tagged(refs, repo=self._repo)) def test_nothing_to_do_but_wants(self): # Just the fact that the client claims to want an object is enough # for sending a pack. Even if there turns out to be nothing. refs = {b"refs/tags/tag1": ONE} tree = Tree() self._repo.object_store.add_object(tree) self._repo.object_store.add_object(make_commit(id=ONE, tree=tree)) self._repo.refs._update(refs) self._handler.proto.set_output( [ b"want " + ONE + b" side-band-64k thin-pack ofs-delta", None, b"have " + ONE, b"done", None, ] ) self._handler.handle() # The server should always send a pack, even if it's empty. self.assertTrue(self._handler.proto.get_received_line(1).startswith(b"PACK")) def test_nothing_to_do_no_wants(self): # Don't send a pack if the client didn't ask for anything. refs = {b"refs/tags/tag1": ONE} tree = Tree() self._repo.object_store.add_object(tree) self._repo.object_store.add_object(make_commit(id=ONE, tree=tree)) self._repo.refs._update(refs) self._handler.proto.set_output([None]) self._handler.handle() # The server should not send a pack, since the client didn't ask for # anything. 
self.assertEqual([], self._handler.proto._received[1]) class FindShallowTests(TestCase): def setUp(self): super(FindShallowTests, self).setUp() self._store = MemoryObjectStore() def make_commit(self, **attrs): commit = make_commit(**attrs) self._store.add_object(commit) return commit def make_linear_commits(self, n, message=b""): commits = [] parents = [] for _ in range(n): commits.append(self.make_commit(parents=parents, message=message)) parents = [commits[-1].id] return commits def assertSameElements(self, expected, actual): self.assertEqual(set(expected), set(actual)) def test_linear(self): c1, c2, c3 = self.make_linear_commits(3) self.assertEqual( (set([c3.id]), set([])), _find_shallow(self._store, [c3.id], 1) ) self.assertEqual( - (set([c2.id]), set([c3.id])), _find_shallow(self._store, [c3.id], 2) + (set([c2.id]), set([c3.id])), + _find_shallow(self._store, [c3.id], 2), ) self.assertEqual( - (set([c1.id]), set([c2.id, c3.id])), _find_shallow(self._store, [c3.id], 3) + (set([c1.id]), set([c2.id, c3.id])), + _find_shallow(self._store, [c3.id], 3), ) self.assertEqual( (set([]), set([c1.id, c2.id, c3.id])), _find_shallow(self._store, [c3.id], 4), ) def test_multiple_independent(self): a = self.make_linear_commits(2, message=b"a") b = self.make_linear_commits(2, message=b"b") c = self.make_linear_commits(2, message=b"c") heads = [a[1].id, b[1].id, c[1].id] self.assertEqual( (set([a[0].id, b[0].id, c[0].id]), set(heads)), _find_shallow(self._store, heads, 2), ) def test_multiple_overlapping(self): # Create the following commit tree: # 1--2 # \ # 3--4 c1, c2 = self.make_linear_commits(2) c3 = self.make_commit(parents=[c1.id]) c4 = self.make_commit(parents=[c3.id]) # 1 is shallow along the path from 4, but not along the path from 2. self.assertEqual( (set([c1.id]), set([c1.id, c2.id, c3.id, c4.id])), _find_shallow(self._store, [c2.id, c4.id], 3), ) def test_merge(self): c1 = self.make_commit() c2 = self.make_commit() c3 = self.make_commit(parents=[c1.id, c2.id]) self.assertEqual( - (set([c1.id, c2.id]), set([c3.id])), _find_shallow(self._store, [c3.id], 2) + (set([c1.id, c2.id]), set([c3.id])), + _find_shallow(self._store, [c3.id], 2), ) def test_tag(self): c1, c2 = self.make_linear_commits(2) tag = make_tag(c2, name=b"tag") self._store.add_object(tag) self.assertEqual( - (set([c1.id]), set([c2.id])), _find_shallow(self._store, [tag.id], 2) + (set([c1.id]), set([c2.id])), + _find_shallow(self._store, [tag.id], 2), ) class TestUploadPackHandler(UploadPackHandler): @classmethod def required_capabilities(self): return [] class ReceivePackHandlerTestCase(TestCase): def setUp(self): super(ReceivePackHandlerTestCase, self).setUp() self._repo = MemoryRepo.init_bare([], {}) backend = DictBackend({b"/": self._repo}) self._handler = ReceivePackHandler( backend, [b"/", b"host=lolcathost"], TestProto() ) def test_apply_pack_del_ref(self): refs = {b"refs/heads/master": TWO, b"refs/heads/fake-branch": ONE} self._repo.refs._update(refs) update_refs = [ [ONE, ZERO_SHA, b"refs/heads/fake-branch"], ] self._handler.set_client_capabilities([b"delete-refs"]) status = self._handler._apply_pack(update_refs) self.assertEqual(status[0][0], b"unpack") self.assertEqual(status[0][1], b"ok") self.assertEqual(status[1][0], b"refs/heads/fake-branch") self.assertEqual(status[1][1], b"ok") class ProtocolGraphWalkerEmptyTestCase(TestCase): def setUp(self): super(ProtocolGraphWalkerEmptyTestCase, self).setUp() self._repo = MemoryRepo.init_bare([], {}) backend = DictBackend({b"/": self._repo}) self._walker = 
_ProtocolGraphWalker( TestUploadPackHandler(backend, [b"/", b"host=lolcats"], TestProto()), self._repo.object_store, self._repo.get_peeled, self._repo.refs.get_symrefs, ) def test_empty_repository(self): # The server should wait for a flush packet. self._walker.proto.set_output([]) self.assertRaises(HangupException, self._walker.determine_wants, {}) self.assertEqual(None, self._walker.proto.get_received_line()) self._walker.proto.set_output([None]) self.assertEqual([], self._walker.determine_wants({})) self.assertEqual(None, self._walker.proto.get_received_line()) class ProtocolGraphWalkerTestCase(TestCase): def setUp(self): super(ProtocolGraphWalkerTestCase, self).setUp() # Create the following commit tree: # 3---5 # / # 1---2---4 commits = [ make_commit(id=ONE, parents=[], commit_time=111), make_commit(id=TWO, parents=[ONE], commit_time=222), make_commit(id=THREE, parents=[ONE], commit_time=333), make_commit(id=FOUR, parents=[TWO], commit_time=444), make_commit(id=FIVE, parents=[THREE], commit_time=555), ] self._repo = MemoryRepo.init_bare(commits, {}) backend = DictBackend({b"/": self._repo}) self._walker = _ProtocolGraphWalker( TestUploadPackHandler(backend, [b"/", b"host=lolcats"], TestProto()), self._repo.object_store, self._repo.get_peeled, self._repo.refs.get_symrefs, ) def test_all_wants_satisfied_no_haves(self): self._walker.set_wants([ONE]) self.assertFalse(self._walker.all_wants_satisfied([])) self._walker.set_wants([TWO]) self.assertFalse(self._walker.all_wants_satisfied([])) self._walker.set_wants([THREE]) self.assertFalse(self._walker.all_wants_satisfied([])) def test_all_wants_satisfied_have_root(self): self._walker.set_wants([ONE]) self.assertTrue(self._walker.all_wants_satisfied([ONE])) self._walker.set_wants([TWO]) self.assertTrue(self._walker.all_wants_satisfied([ONE])) self._walker.set_wants([THREE]) self.assertTrue(self._walker.all_wants_satisfied([ONE])) def test_all_wants_satisfied_have_branch(self): self._walker.set_wants([TWO]) self.assertTrue(self._walker.all_wants_satisfied([TWO])) # wrong branch self._walker.set_wants([THREE]) self.assertFalse(self._walker.all_wants_satisfied([TWO])) def test_all_wants_satisfied(self): self._walker.set_wants([FOUR, FIVE]) # trivial case: wants == haves self.assertTrue(self._walker.all_wants_satisfied([FOUR, FIVE])) # cases that require walking the commit tree self.assertTrue(self._walker.all_wants_satisfied([ONE])) self.assertFalse(self._walker.all_wants_satisfied([TWO])) self.assertFalse(self._walker.all_wants_satisfied([THREE])) self.assertTrue(self._walker.all_wants_satisfied([TWO, THREE])) def test_split_proto_line(self): allowed = (b"want", b"done", None) self.assertEqual( (b"want", ONE), _split_proto_line(b"want " + ONE + b"\n", allowed) ) self.assertEqual( (b"want", TWO), _split_proto_line(b"want " + TWO + b"\n", allowed) ) self.assertRaises(GitProtocolError, _split_proto_line, b"want xxxx\n", allowed) self.assertRaises( - UnexpectedCommandError, _split_proto_line, b"have " + THREE + b"\n", allowed + UnexpectedCommandError, + _split_proto_line, + b"have " + THREE + b"\n", + allowed, ) self.assertRaises( - GitProtocolError, _split_proto_line, b"foo " + FOUR + b"\n", allowed + GitProtocolError, + _split_proto_line, + b"foo " + FOUR + b"\n", + allowed, ) self.assertRaises(GitProtocolError, _split_proto_line, b"bar", allowed) self.assertEqual((b"done", None), _split_proto_line(b"done\n", allowed)) self.assertEqual((None, None), _split_proto_line(b"", allowed)) def test_determine_wants(self): 
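# Rough shape of the happy-path negotiation exercised first (an
# illustrative sketch, SHAs abbreviated):
#
#     C: want <ONE> multi_ack
#     C: want <TWO>
#     C: 0000
#
# determine_wants() collects the wanted SHAs up to the flush-pkt, and
# wanting a SHA that was never advertised (FOUR below) is a protocol
# error.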
self._walker.proto.set_output([None]) self.assertEqual([], self._walker.determine_wants({})) self.assertEqual(None, self._walker.proto.get_received_line()) self._walker.proto.set_output( [ b"want " + ONE + b" multi_ack", b"want " + TWO, None, ] ) heads = { b"refs/heads/ref1": ONE, b"refs/heads/ref2": TWO, b"refs/heads/ref3": THREE, } self._repo.refs._update(heads) self.assertEqual([ONE, TWO], self._walker.determine_wants(heads)) self._walker.advertise_refs = True self.assertEqual([], self._walker.determine_wants(heads)) self._walker.advertise_refs = False self._walker.proto.set_output([b"want " + FOUR + b" multi_ack", None]) self.assertRaises(GitProtocolError, self._walker.determine_wants, heads) self._walker.proto.set_output([None]) self.assertEqual([], self._walker.determine_wants(heads)) self._walker.proto.set_output([b"want " + ONE + b" multi_ack", b"foo", None]) self.assertRaises(GitProtocolError, self._walker.determine_wants, heads) self._walker.proto.set_output([b"want " + FOUR + b" multi_ack", None]) self.assertRaises(GitProtocolError, self._walker.determine_wants, heads) def test_determine_wants_advertisement(self): self._walker.proto.set_output([None]) # advertise branch tips plus tag heads = { b"refs/heads/ref4": FOUR, b"refs/heads/ref5": FIVE, b"refs/heads/tag6": SIX, } self._repo.refs._update(heads) self._repo.refs._update_peeled(heads) self._repo.refs._update_peeled({b"refs/heads/tag6": FIVE}) self._walker.determine_wants(heads) lines = [] while True: line = self._walker.proto.get_received_line() if line is None: break # strip capabilities list if present if b"\x00" in line: line = line[: line.index(b"\x00")] lines.append(line.rstrip()) self.assertEqual( [ FOUR + b" refs/heads/ref4", FIVE + b" refs/heads/ref5", FIVE + b" refs/heads/tag6^{}", SIX + b" refs/heads/tag6", ], sorted(lines), ) # ensure peeled tag was advertised immediately following tag for i, line in enumerate(lines): if line.endswith(b" refs/heads/tag6"): self.assertEqual(FIVE + b" refs/heads/tag6^{}", lines[i + 1]) # TODO: test commit time cutoff def _handle_shallow_request(self, lines, heads): self._walker.proto.set_output(lines + [None]) self._walker._handle_shallow_request(heads) def assertReceived(self, expected): self.assertEqual( expected, list(iter(self._walker.proto.get_received_line, None)) ) def test_handle_shallow_request_no_client_shallows(self): self._handle_shallow_request([b"deepen 2\n"], [FOUR, FIVE]) self.assertEqual(set([TWO, THREE]), self._walker.shallow) self.assertReceived( [ b"shallow " + TWO, b"shallow " + THREE, ] ) def test_handle_shallow_request_no_new_shallows(self): lines = [ b"shallow " + TWO + b"\n", b"shallow " + THREE + b"\n", b"deepen 2\n", ] self._handle_shallow_request(lines, [FOUR, FIVE]) self.assertEqual(set([TWO, THREE]), self._walker.shallow) self.assertReceived([]) def test_handle_shallow_request_unshallows(self): lines = [ b"shallow " + TWO + b"\n", b"deepen 3\n", ] self._handle_shallow_request(lines, [FOUR, FIVE]) self.assertEqual(set([ONE]), self._walker.shallow) self.assertReceived( [ b"shallow " + ONE, b"unshallow " + TWO, # THREE is unshallow but was is not shallow in the client ] ) class TestProtocolGraphWalker(object): def __init__(self): self.acks = [] self.lines = [] self.wants_satisified = False self.stateless_rpc = None self.advertise_refs = False self._impl = None self.done_required = True self.done_received = False self._empty = False self.pack_sent = False def read_proto_line(self, allowed): command, sha = self.lines.pop(0) if allowed is not None: assert 
command in allowed return command, sha def send_ack(self, sha, ack_type=b""): self.acks.append((sha, ack_type)) def send_nak(self): self.acks.append((None, b"nak")) def all_wants_satisfied(self, haves): if haves: return self.wants_satisified def pop_ack(self): if not self.acks: return None return self.acks.pop(0) def handle_done(self): if not self._impl: return # Whether or not PACK is sent after is determined by this, so # record this value. self.pack_sent = self._impl.handle_done(self.done_required, self.done_received) return self.pack_sent def notify_done(self): self.done_received = True class AckGraphWalkerImplTestCase(TestCase): """Base setup and asserts for AckGraphWalker tests.""" def setUp(self): super(AckGraphWalkerImplTestCase, self).setUp() self._walker = TestProtocolGraphWalker() self._walker.lines = [ (b"have", TWO), (b"have", ONE), (b"have", THREE), (b"done", None), ] self._impl = self.impl_cls(self._walker) self._walker._impl = self._impl def assertNoAck(self): self.assertEqual(None, self._walker.pop_ack()) def assertAcks(self, acks): for sha, ack_type in acks: self.assertEqual((sha, ack_type), self._walker.pop_ack()) self.assertNoAck() def assertAck(self, sha, ack_type=b""): self.assertAcks([(sha, ack_type)]) def assertNak(self): self.assertAck(None, b"nak") def assertNextEquals(self, sha): self.assertEqual(sha, next(self._impl)) def assertNextEmpty(self): # This is necessary because of no-done - the assumption that it # it safe to immediately send out the final ACK is no longer # true but the test is still needed for it. TestProtocolWalker # does implement the handle_done which will determine whether # the final confirmation can be sent. self.assertRaises(IndexError, next, self._impl) self._walker.handle_done() class SingleAckGraphWalkerImplTestCase(AckGraphWalkerImplTestCase): impl_cls = SingleAckGraphWalkerImpl def test_single_ack(self): self.assertNextEquals(TWO) self.assertNoAck() self.assertNextEquals(ONE) self._impl.ack(ONE) self.assertAck(ONE) self.assertNextEquals(THREE) self._impl.ack(THREE) self.assertNoAck() self.assertNextEquals(None) self.assertNoAck() def test_single_ack_flush(self): # same as ack test but ends with a flush-pkt instead of done self._walker.lines[-1] = (None, None) self.assertNextEquals(TWO) self.assertNoAck() self.assertNextEquals(ONE) self._impl.ack(ONE) self.assertAck(ONE) self.assertNextEquals(THREE) self.assertNoAck() self.assertNextEquals(None) self.assertNoAck() def test_single_ack_nak(self): self.assertNextEquals(TWO) self.assertNoAck() self.assertNextEquals(ONE) self.assertNoAck() self.assertNextEquals(THREE) self.assertNoAck() self.assertNextEquals(None) self.assertNextEmpty() self.assertNak() def test_single_ack_nak_flush(self): # same as nak test but ends with a flush-pkt instead of done self._walker.lines[-1] = (None, None) self.assertNextEquals(TWO) self.assertNoAck() self.assertNextEquals(ONE) self.assertNoAck() self.assertNextEquals(THREE) self.assertNoAck() self.assertNextEquals(None) self.assertNextEmpty() self.assertNak() class MultiAckGraphWalkerImplTestCase(AckGraphWalkerImplTestCase): impl_cls = MultiAckGraphWalkerImpl def test_multi_ack(self): self.assertNextEquals(TWO) self.assertNoAck() self.assertNextEquals(ONE) self._impl.ack(ONE) self.assertAck(ONE, b"continue") self.assertNextEquals(THREE) self._impl.ack(THREE) self.assertAck(THREE, b"continue") self.assertNextEquals(None) self.assertNextEmpty() self.assertAck(THREE) def test_multi_ack_partial(self): self.assertNextEquals(TWO) self.assertNoAck() 
self.assertNextEquals(ONE) self._impl.ack(ONE) self.assertAck(ONE, b"continue") self.assertNextEquals(THREE) self.assertNoAck() self.assertNextEquals(None) self.assertNextEmpty() self.assertAck(ONE) def test_multi_ack_flush(self): self._walker.lines = [ (b"have", TWO), (None, None), (b"have", ONE), (b"have", THREE), (b"done", None), ] self.assertNextEquals(TWO) self.assertNoAck() self.assertNextEquals(ONE) self.assertNak() # nak the flush-pkt self._impl.ack(ONE) self.assertAck(ONE, b"continue") self.assertNextEquals(THREE) self._impl.ack(THREE) self.assertAck(THREE, b"continue") self.assertNextEquals(None) self.assertNextEmpty() self.assertAck(THREE) def test_multi_ack_nak(self): self.assertNextEquals(TWO) self.assertNoAck() self.assertNextEquals(ONE) self.assertNoAck() self.assertNextEquals(THREE) self.assertNoAck() self.assertNextEquals(None) self.assertNextEmpty() self.assertNak() class MultiAckDetailedGraphWalkerImplTestCase(AckGraphWalkerImplTestCase): impl_cls = MultiAckDetailedGraphWalkerImpl def test_multi_ack(self): self.assertNextEquals(TWO) self.assertNoAck() self.assertNextEquals(ONE) self._impl.ack(ONE) self.assertAck(ONE, b"common") self.assertNextEquals(THREE) self._impl.ack(THREE) self.assertAck(THREE, b"common") # done is read. self._walker.wants_satisified = True self.assertNextEquals(None) self._walker.lines.append((None, None)) self.assertNextEmpty() self.assertAcks([(THREE, b"ready"), (None, b"nak"), (THREE, b"")]) # PACK is sent self.assertTrue(self._walker.pack_sent) def test_multi_ack_nodone(self): self._walker.done_required = False self.assertNextEquals(TWO) self.assertNoAck() self.assertNextEquals(ONE) self._impl.ack(ONE) self.assertAck(ONE, b"common") self.assertNextEquals(THREE) self._impl.ack(THREE) self.assertAck(THREE, b"common") # done is read. self._walker.wants_satisified = True self.assertNextEquals(None) self._walker.lines.append((None, None)) self.assertNextEmpty() self.assertAcks([(THREE, b"ready"), (None, b"nak"), (THREE, b"")]) # PACK is sent self.assertTrue(self._walker.pack_sent) def test_multi_ack_flush_end(self): # transmission ends with a flush-pkt without a done but no-done is # assumed. self._walker.lines[-1] = (None, None) self.assertNextEquals(TWO) self.assertNoAck() self.assertNextEquals(ONE) self._impl.ack(ONE) self.assertAck(ONE, b"common") self.assertNextEquals(THREE) self._impl.ack(THREE) self.assertAck(THREE, b"common") # no done is read self._walker.wants_satisified = True self.assertNextEmpty() self.assertAcks([(THREE, b"ready"), (None, b"nak")]) # PACK is NOT sent self.assertFalse(self._walker.pack_sent) def test_multi_ack_flush_end_nodone(self): # transmission ends with a flush-pkt without a done but no-done is # assumed. 
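# (Informal background on no-done: the client may hang up as soon as
# the server has said "ACK <sha> ready", so the server must treat the
# trailing flush-pkt as an implicit done and send the pack without
# waiting for an explicit "done" line; hence pack_sent ends up True.)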
self._walker.lines[-1] = (None, None) self._walker.done_required = False self.assertNextEquals(TWO) self.assertNoAck() self.assertNextEquals(ONE) self._impl.ack(ONE) self.assertAck(ONE, b"common") self.assertNextEquals(THREE) self._impl.ack(THREE) self.assertAck(THREE, b"common") # no done is read, but pretend it is (last 'ACK 'commit_id' '') self._walker.wants_satisified = True self.assertNextEmpty() self.assertAcks([(THREE, b"ready"), (None, b"nak"), (THREE, b"")]) # PACK is sent self.assertTrue(self._walker.pack_sent) def test_multi_ack_partial(self): self.assertNextEquals(TWO) self.assertNoAck() self.assertNextEquals(ONE) self._impl.ack(ONE) self.assertAck(ONE, b"common") self.assertNextEquals(THREE) self.assertNoAck() self.assertNextEquals(None) self.assertNextEmpty() self.assertAck(ONE) def test_multi_ack_flush(self): # same as ack test but contains a flush-pkt in the middle self._walker.lines = [ (b"have", TWO), (None, None), (b"have", ONE), (b"have", THREE), (b"done", None), (None, None), ] self.assertNextEquals(TWO) self.assertNoAck() self.assertNextEquals(ONE) self.assertNak() # nak the flush-pkt self._impl.ack(ONE) self.assertAck(ONE, b"common") self.assertNextEquals(THREE) self._impl.ack(THREE) self.assertAck(THREE, b"common") self._walker.wants_satisified = True self.assertNextEquals(None) self.assertNextEmpty() self.assertAcks([(THREE, b"ready"), (None, b"nak"), (THREE, b"")]) def test_multi_ack_nak(self): self.assertNextEquals(TWO) self.assertNoAck() self.assertNextEquals(ONE) self.assertNoAck() self.assertNextEquals(THREE) self.assertNoAck() # Done is sent here. self.assertNextEquals(None) self.assertNextEmpty() self.assertNak() self.assertNextEmpty() self.assertTrue(self._walker.pack_sent) def test_multi_ack_nak_nodone(self): self._walker.done_required = False self.assertNextEquals(TWO) self.assertNoAck() self.assertNextEquals(ONE) self.assertNoAck() self.assertNextEquals(THREE) self.assertNoAck() # Done is sent here. self.assertFalse(self._walker.pack_sent) self.assertNextEquals(None) self.assertNextEmpty() self.assertTrue(self._walker.pack_sent) self.assertNak() self.assertNextEmpty() def test_multi_ack_nak_flush(self): # same as nak test but contains a flush-pkt in the middle self._walker.lines = [ (b"have", TWO), (None, None), (b"have", ONE), (b"have", THREE), (b"done", None), ] self.assertNextEquals(TWO) self.assertNoAck() self.assertNextEquals(ONE) self.assertNak() self.assertNextEquals(THREE) self.assertNoAck() self.assertNextEquals(None) self.assertNextEmpty() self.assertNak() def test_multi_ack_stateless(self): # transmission ends with a flush-pkt self._walker.lines[-1] = (None, None) self._walker.stateless_rpc = True self.assertNextEquals(TWO) self.assertNoAck() self.assertNextEquals(ONE) self.assertNoAck() self.assertNextEquals(THREE) self.assertNoAck() self.assertFalse(self._walker.pack_sent) self.assertNextEquals(None) self.assertNak() self.assertNextEmpty() self.assertNoAck() self.assertFalse(self._walker.pack_sent) def test_multi_ack_stateless_nodone(self): self._walker.done_required = False # transmission ends with a flush-pkt self._walker.lines[-1] = (None, None) self._walker.stateless_rpc = True self.assertNextEquals(TWO) self.assertNoAck() self.assertNextEquals(ONE) self.assertNoAck() self.assertNextEquals(THREE) self.assertNoAck() self.assertFalse(self._walker.pack_sent) self.assertNextEquals(None) self.assertNak() self.assertNextEmpty() self.assertNoAck() # PACK will still not be sent. 
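# (Informal rationale: under stateless RPC every request/response pair
# stands alone, so a negotiation round ending in a flush-pkt never
# carries the pack; it would be sent only with the round that actually
# completes the negotiation.)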
self.assertFalse(self._walker.pack_sent) class FileSystemBackendTests(TestCase): """Tests for FileSystemBackend.""" def setUp(self): super(FileSystemBackendTests, self).setUp() self.path = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, self.path) self.repo = Repo.init(self.path) if sys.platform == "win32": self.backend = FileSystemBackend(self.path[0] + ":" + os.sep) else: self.backend = FileSystemBackend() def test_nonexistant(self): self.assertRaises( - NotGitRepository, self.backend.open_repository, "/does/not/exist/unless/foo" + NotGitRepository, + self.backend.open_repository, + "/does/not/exist/unless/foo", ) def test_absolute(self): repo = self.backend.open_repository(self.path) self.assertTrue( os.path.samefile( os.path.abspath(repo.path), os.path.abspath(self.repo.path) ) ) def test_child(self): self.assertRaises( NotGitRepository, self.backend.open_repository, os.path.join(self.path, "foo"), ) def test_bad_repo_path(self): backend = FileSystemBackend() self.assertRaises(NotGitRepository, lambda: backend.open_repository("/ups")) class DictBackendTests(TestCase): """Tests for DictBackend.""" def test_nonexistant(self): repo = MemoryRepo.init_bare([], {}) backend = DictBackend({b"/": repo}) self.assertRaises( - NotGitRepository, backend.open_repository, "/does/not/exist/unless/foo" + NotGitRepository, + backend.open_repository, + "/does/not/exist/unless/foo", ) def test_bad_repo_path(self): repo = MemoryRepo.init_bare([], {}) backend = DictBackend({b"/": repo}) self.assertRaises(NotGitRepository, lambda: backend.open_repository("/ups")) class ServeCommandTests(TestCase): """Tests for serve_command.""" def setUp(self): super(ServeCommandTests, self).setUp() self.backend = DictBackend({}) def serve_command(self, handler_cls, args, inf, outf): return serve_command( - handler_cls, [b"test"] + args, backend=self.backend, inf=inf, outf=outf + handler_cls, + [b"test"] + args, + backend=self.backend, + inf=inf, + outf=outf, ) def test_receive_pack(self): commit = make_commit(id=ONE, parents=[], commit_time=111) self.backend.repos[b"/"] = MemoryRepo.init_bare( [commit], {b"refs/heads/master": commit.id} ) outf = BytesIO() exitcode = self.serve_command( ReceivePackHandler, [b"/"], BytesIO(b"0000"), outf ) outlines = outf.getvalue().splitlines() self.assertEqual(2, len(outlines)) self.assertEqual( b"1111111111111111111111111111111111111111 refs/heads/master", outlines[0][4:].split(b"\x00")[0], ) self.assertEqual(b"0000", outlines[-1]) self.assertEqual(0, exitcode) class UpdateServerInfoTests(TestCase): """Tests for update_server_info.""" def setUp(self): super(UpdateServerInfoTests, self).setUp() self.path = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, self.path) self.repo = Repo.init(self.path) def test_empty(self): update_server_info(self.repo) with open(os.path.join(self.path, ".git", "info", "refs"), "rb") as f: self.assertEqual(b"", f.read()) p = os.path.join(self.path, ".git", "objects", "info", "packs") with open(p, "rb") as f: self.assertEqual(b"", f.read()) def test_simple(self): commit_id = self.repo.do_commit( message=b"foo", committer=b"Joe Example ", ref=b"refs/heads/foo", ) update_server_info(self.repo) with open(os.path.join(self.path, ".git", "info", "refs"), "rb") as f: self.assertEqual(f.read(), commit_id + b"\trefs/heads/foo\n") p = os.path.join(self.path, ".git", "objects", "info", "packs") with open(p, "rb") as f: self.assertEqual(f.read(), b"") diff --git a/dulwich/tests/test_walk.py b/dulwich/tests/test_walk.py index 55911e02..86a35ab4 100644 --- 
a/dulwich/tests/test_walk.py +++ b/dulwich/tests/test_walk.py @@ -1,619 +1,632 @@ # test_walk.py -- Tests for commit walking functionality. # Copyright (C) 2010 Google, Inc. # # Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU # General Public License as public by the Free Software Foundation; version 2.0 # or (at your option) any later version. You can redistribute it and/or # modify it under the terms of either of these two licenses. # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # You should have received a copy of the licenses; if not, see # for a copy of the GNU General Public License # and for a copy of the Apache # License, Version 2.0. # """Tests for commit walking functionality.""" from itertools import ( permutations, ) from unittest import expectedFailure from dulwich.diff_tree import ( CHANGE_MODIFY, CHANGE_RENAME, TreeChange, RenameDetector, ) from dulwich.errors import ( MissingCommitError, ) from dulwich.object_store import ( MemoryObjectStore, ) from dulwich.objects import ( Commit, Blob, ) from dulwich.walk import ORDER_TOPO, WalkEntry, Walker, _topo_reorder from dulwich.tests import TestCase from dulwich.tests.utils import ( F, make_object, make_tag, build_commit_graph, ) class TestWalkEntry(object): def __init__(self, commit, changes): self.commit = commit self.changes = changes def __repr__(self): - return "<TestWalkEntry commit=%s, changes=%r>" % (self.commit.id, self.changes) + return "<TestWalkEntry commit=%s, changes=%r>" % ( + self.commit.id, + self.changes, + ) def __eq__(self, other): if not isinstance(other, WalkEntry) or self.commit != other.commit: return False if self.changes is None: return True return self.changes == other.changes() class WalkerTest(TestCase): def setUp(self): super(WalkerTest, self).setUp() self.store = MemoryObjectStore() def make_commits(self, commit_spec, **kwargs): times = kwargs.pop("times", []) attrs = kwargs.pop("attrs", {}) for i, t in enumerate(times): attrs.setdefault(i + 1, {})["commit_time"] = t return build_commit_graph(self.store, commit_spec, attrs=attrs, **kwargs) def make_linear_commits(self, num_commits, **kwargs): commit_spec = [] for i in range(1, num_commits + 1): c = [i] if i > 1: c.append(i - 1) commit_spec.append(c) return self.make_commits(commit_spec, **kwargs) def assertWalkYields(self, expected, *args, **kwargs): walker = Walker(self.store, *args, **kwargs) expected = list(expected) for i, entry in enumerate(expected): if isinstance(entry, Commit): expected[i] = TestWalkEntry(entry, None) actual = list(walker) self.assertEqual(expected, actual) def test_tag(self): c1, c2, c3 = self.make_linear_commits(3) t2 = make_tag(target=c2) self.store.add_object(t2) self.assertWalkYields([c2, c1], [t2.id]) def test_linear(self): c1, c2, c3 = self.make_linear_commits(3) self.assertWalkYields([c1], [c1.id]) self.assertWalkYields([c2, c1], [c2.id]) self.assertWalkYields([c3, c2, c1], [c3.id]) self.assertWalkYields([c3, c2, c1], [c3.id, c1.id]) self.assertWalkYields([c3, c2], [c3.id], exclude=[c1.id]) self.assertWalkYields([c3, c2], [c3.id, c1.id], exclude=[c1.id]) self.assertWalkYields([c3], [c3.id, c1.id], exclude=[c2.id]) def test_missing(self): cs = list(reversed(self.make_linear_commits(20))) self.assertWalkYields(cs, [cs[0].id]) # Exactly how close we can get to a missing commit depends on our #
implementation (in particular the choice of _MAX_EXTRA_COMMITS), but # we should at least be able to walk some history in a broken repo. del self.store[cs[-1].id] for i in range(1, 11): self.assertWalkYields(cs[:i], [cs[0].id], max_entries=i) self.assertRaises(MissingCommitError, Walker, self.store, [cs[-1].id]) def test_branch(self): c1, x2, x3, y4 = self.make_commits([[1], [2, 1], [3, 2], [4, 1]]) self.assertWalkYields([x3, x2, c1], [x3.id]) self.assertWalkYields([y4, c1], [y4.id]) self.assertWalkYields([y4, x2, c1], [y4.id, x2.id]) self.assertWalkYields([y4, x2], [y4.id, x2.id], exclude=[c1.id]) self.assertWalkYields([y4, x3], [y4.id, x3.id], exclude=[x2.id]) self.assertWalkYields([y4], [y4.id], exclude=[x3.id]) self.assertWalkYields([x3, x2], [x3.id], exclude=[y4.id]) def test_merge(self): c1, c2, c3, c4 = self.make_commits([[1], [2, 1], [3, 1], [4, 2, 3]]) self.assertWalkYields([c4, c3, c2, c1], [c4.id]) self.assertWalkYields([c3, c1], [c3.id]) self.assertWalkYields([c2, c1], [c2.id]) self.assertWalkYields([c4, c3], [c4.id], exclude=[c2.id]) self.assertWalkYields([c4, c2], [c4.id], exclude=[c3.id]) def test_merge_of_new_branch_from_old_base(self): # The commit on the branch was made at a time after any of the # commits on master, but the branch was from an older commit. # See also test_merge_of_old_branch self.maxDiff = None c1, c2, c3, c4, c5 = self.make_commits( [[1], [2, 1], [3, 2], [4, 1], [5, 3, 4]], times=[1, 2, 3, 4, 5], ) self.assertWalkYields([c5, c4, c3, c2, c1], [c5.id]) self.assertWalkYields([c3, c2, c1], [c3.id]) self.assertWalkYields([c2, c1], [c2.id]) @expectedFailure def test_merge_of_old_branch(self): # The commit on the branch was made at a time before any of # the commits on master, but it was merged into master after # those commits. 
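# Illustrative aside: Walker pops commits from a priority queue keyed on
# commit_time, so a parent with a newer timestamp than its child (clock
# skew) can surface out of order; the walker only reads a bounded number
# of extra commits (_MAX_EXTRA_COMMITS) to compensate. For the spec used
# below:
#
#     spec  = [[1], [2, 1], [3, 2], [4, 1], [5, 3, 4]]
#     times = [1, 3, 4, 2, 5]   # c4 predates c2/c3 despite merging later
#
# a purely date-ordered pop sequence is roughly c5, c3, c2, c4, c1, which
# is why the assertion below is currently marked expectedFailure.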
# See also test_merge_of_new_branch_from_old_base self.maxDiff = None c1, c2, c3, c4, c5 = self.make_commits( [[1], [2, 1], [3, 2], [4, 1], [5, 3, 4]], times=[1, 3, 4, 2, 5], ) self.assertWalkYields([c5, c4, c3, c2, c1], [c5.id]) self.assertWalkYields([c3, c2, c1], [c3.id]) self.assertWalkYields([c2, c1], [c2.id]) def test_reverse(self): c1, c2, c3 = self.make_linear_commits(3) self.assertWalkYields([c1, c2, c3], [c3.id], reverse=True) def test_max_entries(self): c1, c2, c3 = self.make_linear_commits(3) self.assertWalkYields([c3, c2, c1], [c3.id], max_entries=3) self.assertWalkYields([c3, c2], [c3.id], max_entries=2) self.assertWalkYields([c3], [c3.id], max_entries=1) def test_reverse_after_max_entries(self): c1, c2, c3 = self.make_linear_commits(3) self.assertWalkYields([c1, c2, c3], [c3.id], max_entries=3, reverse=True) self.assertWalkYields([c2, c3], [c3.id], max_entries=2, reverse=True) self.assertWalkYields([c3], [c3.id], max_entries=1, reverse=True) def test_changes_one_parent(self): blob_a1 = make_object(Blob, data=b"a1") blob_a2 = make_object(Blob, data=b"a2") blob_b2 = make_object(Blob, data=b"b2") c1, c2 = self.make_linear_commits( - 2, trees={1: [(b"a", blob_a1)], 2: [(b"a", blob_a2), (b"b", blob_b2)]} + 2, + trees={ + 1: [(b"a", blob_a1)], + 2: [(b"a", blob_a2), (b"b", blob_b2)], + }, ) e1 = TestWalkEntry(c1, [TreeChange.add((b"a", F, blob_a1.id))]) e2 = TestWalkEntry( c2, [ TreeChange(CHANGE_MODIFY, (b"a", F, blob_a1.id), (b"a", F, blob_a2.id)), TreeChange.add((b"b", F, blob_b2.id)), ], ) self.assertWalkYields([e2, e1], [c2.id]) def test_changes_multiple_parents(self): blob_a1 = make_object(Blob, data=b"a1") blob_b2 = make_object(Blob, data=b"b2") blob_a3 = make_object(Blob, data=b"a3") c1, c2, c3 = self.make_commits( [[1], [2], [3, 1, 2]], trees={ 1: [(b"a", blob_a1)], 2: [(b"b", blob_b2)], 3: [(b"a", blob_a3), (b"b", blob_b2)], }, ) # a is a modify/add conflict and b is not conflicted. changes = [ [ TreeChange(CHANGE_MODIFY, (b"a", F, blob_a1.id), (b"a", F, blob_a3.id)), TreeChange.add((b"a", F, blob_a3.id)), ] ] self.assertWalkYields( [TestWalkEntry(c3, changes)], [c3.id], exclude=[c1.id, c2.id] ) def test_path_matches(self): walker = Walker(None, [], paths=[b"foo", b"bar", b"baz/quux"]) self.assertTrue(walker._path_matches(b"foo")) self.assertTrue(walker._path_matches(b"foo/a")) self.assertTrue(walker._path_matches(b"foo/a/b")) self.assertTrue(walker._path_matches(b"bar")) self.assertTrue(walker._path_matches(b"baz/quux")) self.assertTrue(walker._path_matches(b"baz/quux/a")) self.assertFalse(walker._path_matches(None)) self.assertFalse(walker._path_matches(b"oops")) self.assertFalse(walker._path_matches(b"fool")) self.assertFalse(walker._path_matches(b"baz")) self.assertFalse(walker._path_matches(b"baz/quu")) def test_paths(self): blob_a1 = make_object(Blob, data=b"a1") blob_b2 = make_object(Blob, data=b"b2") blob_a3 = make_object(Blob, data=b"a3") blob_b3 = make_object(Blob, data=b"b3") c1, c2, c3 = self.make_linear_commits( 3, trees={ 1: [(b"a", blob_a1)], 2: [(b"a", blob_a1), (b"x/b", blob_b2)], 3: [(b"a", blob_a3), (b"x/b", blob_b3)], }, ) self.assertWalkYields([c3, c2, c1], [c3.id]) self.assertWalkYields([c3, c1], [c3.id], paths=[b"a"]) self.assertWalkYields([c3, c2], [c3.id], paths=[b"x/b"]) # All changes are included, not just for requested paths. 
changes = [ TreeChange(CHANGE_MODIFY, (b"a", F, blob_a1.id), (b"a", F, blob_a3.id)), TreeChange(CHANGE_MODIFY, (b"x/b", F, blob_b2.id), (b"x/b", F, blob_b3.id)), ] self.assertWalkYields( [TestWalkEntry(c3, changes)], [c3.id], max_entries=1, paths=[b"a"] ) def test_paths_subtree(self): blob_a = make_object(Blob, data=b"a") blob_b = make_object(Blob, data=b"b") c1, c2, c3 = self.make_linear_commits( 3, trees={ 1: [(b"x/a", blob_a)], 2: [(b"b", blob_b), (b"x/a", blob_a)], 3: [(b"b", blob_b), (b"x/a", blob_a), (b"x/b", blob_b)], }, ) self.assertWalkYields([c2], [c3.id], paths=[b"b"]) self.assertWalkYields([c3, c1], [c3.id], paths=[b"x"]) def test_paths_max_entries(self): blob_a = make_object(Blob, data=b"a") blob_b = make_object(Blob, data=b"b") c1, c2 = self.make_linear_commits( 2, trees={1: [(b"a", blob_a)], 2: [(b"a", blob_a), (b"b", blob_b)]} ) self.assertWalkYields([c2], [c2.id], paths=[b"b"], max_entries=1) self.assertWalkYields([c1], [c1.id], paths=[b"a"], max_entries=1) def test_paths_merge(self): blob_a1 = make_object(Blob, data=b"a1") blob_a2 = make_object(Blob, data=b"a2") blob_a3 = make_object(Blob, data=b"a3") x1, y2, m3, m4 = self.make_commits( [[1], [2], [3, 1, 2], [4, 1, 2]], trees={ 1: [(b"a", blob_a1)], 2: [(b"a", blob_a2)], 3: [(b"a", blob_a3)], 4: [(b"a", blob_a1)], }, ) # Non-conflicting self.assertWalkYields([m3, y2, x1], [m3.id], paths=[b"a"]) self.assertWalkYields([y2, x1], [m4.id], paths=[b"a"]) def test_changes_with_renames(self): blob = make_object(Blob, data=b"blob") c1, c2 = self.make_linear_commits( 2, trees={1: [(b"a", blob)], 2: [(b"b", blob)]} ) entry_a = (b"a", F, blob.id) entry_b = (b"b", F, blob.id) - changes_without_renames = [TreeChange.delete(entry_a), TreeChange.add(entry_b)] + changes_without_renames = [ + TreeChange.delete(entry_a), + TreeChange.add(entry_b), + ] changes_with_renames = [TreeChange(CHANGE_RENAME, entry_a, entry_b)] self.assertWalkYields( - [TestWalkEntry(c2, changes_without_renames)], [c2.id], max_entries=1 + [TestWalkEntry(c2, changes_without_renames)], + [c2.id], + max_entries=1, ) detector = RenameDetector(self.store) self.assertWalkYields( [TestWalkEntry(c2, changes_with_renames)], [c2.id], max_entries=1, rename_detector=detector, ) def test_follow_rename(self): blob = make_object(Blob, data=b"blob") names = [b"a", b"a", b"b", b"b", b"c", b"c"] trees = dict((i + 1, [(n, blob, F)]) for i, n in enumerate(names)) c1, c2, c3, c4, c5, c6 = self.make_linear_commits(6, trees=trees) self.assertWalkYields([c5], [c6.id], paths=[b"c"]) def e(n): return (n, F, blob.id) self.assertWalkYields( [ TestWalkEntry(c5, [TreeChange(CHANGE_RENAME, e(b"b"), e(b"c"))]), TestWalkEntry(c3, [TreeChange(CHANGE_RENAME, e(b"a"), e(b"b"))]), TestWalkEntry(c1, [TreeChange.add(e(b"a"))]), ], [c6.id], paths=[b"c"], follow=True, ) def test_follow_rename_remove_path(self): blob = make_object(Blob, data=b"blob") _, _, _, c4, c5, c6 = self.make_linear_commits( 6, trees={ 1: [(b"a", blob), (b"c", blob)], 2: [], 3: [], 4: [(b"b", blob)], 5: [(b"a", blob)], 6: [(b"c", blob)], }, ) def e(n): return (n, F, blob.id) # Once the path changes to b, we aren't interested in a or c anymore. 
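# Illustrative aside: with follow=True the walker rewrites its followed
# path set as it crosses each rename while walking backwards, roughly:
#
#     paths = {b"c"}   # what was asked for at c6
#     paths = {b"a"}   # after seeing c6's rename of a -> c
#     paths = {b"b"}   # after seeing c5's rename of b -> a
#
# (Walker._change_matches does paths.add(old_path) /
# paths.remove(new_path).) The expected yields below reflect that: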
self.assertWalkYields( [ TestWalkEntry(c6, [TreeChange(CHANGE_RENAME, e(b"a"), e(b"c"))]), TestWalkEntry(c5, [TreeChange(CHANGE_RENAME, e(b"b"), e(b"a"))]), TestWalkEntry(c4, [TreeChange.add(e(b"b"))]), ], [c6.id], paths=[b"c"], follow=True, ) def test_since(self): c1, c2, c3 = self.make_linear_commits(3) self.assertWalkYields([c3, c2, c1], [c3.id], since=-1) self.assertWalkYields([c3, c2, c1], [c3.id], since=0) self.assertWalkYields([c3, c2], [c3.id], since=1) self.assertWalkYields([c3, c2], [c3.id], since=99) self.assertWalkYields([c3, c2], [c3.id], since=100) self.assertWalkYields([c3], [c3.id], since=101) self.assertWalkYields([c3], [c3.id], since=199) self.assertWalkYields([c3], [c3.id], since=200) self.assertWalkYields([], [c3.id], since=201) self.assertWalkYields([], [c3.id], since=300) def test_until(self): c1, c2, c3 = self.make_linear_commits(3) self.assertWalkYields([], [c3.id], until=-1) self.assertWalkYields([c1], [c3.id], until=0) self.assertWalkYields([c1], [c3.id], until=1) self.assertWalkYields([c1], [c3.id], until=99) self.assertWalkYields([c2, c1], [c3.id], until=100) self.assertWalkYields([c2, c1], [c3.id], until=101) self.assertWalkYields([c2, c1], [c3.id], until=199) self.assertWalkYields([c3, c2, c1], [c3.id], until=200) self.assertWalkYields([c3, c2, c1], [c3.id], until=201) self.assertWalkYields([c3, c2, c1], [c3.id], until=300) def test_since_until(self): c1, c2, c3 = self.make_linear_commits(3) self.assertWalkYields([], [c3.id], since=100, until=99) self.assertWalkYields([c3, c2, c1], [c3.id], since=-1, until=201) self.assertWalkYields([c2], [c3.id], since=100, until=100) self.assertWalkYields([c2], [c3.id], since=50, until=150) def test_since_over_scan(self): commits = self.make_linear_commits(11, times=[9, 0, 1, 2, 3, 4, 5, 8, 6, 7, 9]) c8, _, c10, c11 = commits[-4:] del self.store[commits[0].id] # c9 is older than we want to walk, but is out of order with its # parent, so we need to walk past it to get to c8. # c1 would also match, but we've deleted it, and it should get pruned # even with over-scanning. self.assertWalkYields([c11, c10, c8], [c11.id], since=7) def assertTopoOrderEqual(self, expected_commits, commits): entries = [TestWalkEntry(c, None) for c in commits] actual_ids = [e.commit.id for e in list(_topo_reorder(entries))] self.assertEqual([c.id for c in expected_commits], actual_ids) def test_topo_reorder_linear(self): commits = self.make_linear_commits(5) commits.reverse() for perm in permutations(commits): self.assertTopoOrderEqual(commits, perm) def test_topo_reorder_multiple_parents(self): c1, c2, c3 = self.make_commits([[1], [2], [3, 1, 2]]) # Already sorted, so totally FIFO. self.assertTopoOrderEqual([c3, c2, c1], [c3, c2, c1]) self.assertTopoOrderEqual([c3, c1, c2], [c3, c1, c2]) # c3 causes one parent to be yielded. self.assertTopoOrderEqual([c3, c2, c1], [c2, c3, c1]) self.assertTopoOrderEqual([c3, c1, c2], [c1, c3, c2]) # c3 causes both parents to be yielded. self.assertTopoOrderEqual([c3, c2, c1], [c1, c2, c3]) self.assertTopoOrderEqual([c3, c2, c1], [c2, c1, c3]) def test_topo_reorder_multiple_children(self): c1, c2, c3 = self.make_commits([[1], [2, 1], [3, 1]]) # c2 and c3 are FIFO but c1 moves to the end. 
self.assertTopoOrderEqual([c3, c2, c1], [c3, c2, c1]) self.assertTopoOrderEqual([c3, c2, c1], [c3, c1, c2]) self.assertTopoOrderEqual([c3, c2, c1], [c1, c3, c2]) self.assertTopoOrderEqual([c2, c3, c1], [c2, c3, c1]) self.assertTopoOrderEqual([c2, c3, c1], [c2, c1, c3]) self.assertTopoOrderEqual([c2, c3, c1], [c1, c2, c3]) def test_out_of_order_children(self): c1, c2, c3, c4, c5 = self.make_commits( [[1], [2, 1], [3, 2], [4, 1], [5, 3, 4]], times=[2, 1, 3, 4, 5] ) self.assertWalkYields([c5, c4, c3, c1, c2], [c5.id]) self.assertWalkYields([c5, c4, c3, c2, c1], [c5.id], order=ORDER_TOPO) def test_out_of_order_with_exclude(self): # Create the following graph: # c1-------x2---m6 # \ / # \-y3--y4-/--y5 # Due to skew, y5 is the oldest commit. c1, x2, y3, y4, y5, m6 = self.make_commits( - [[1], [2, 1], [3, 1], [4, 3], [5, 4], [6, 2, 4]], times=[2, 3, 4, 5, 1, 6] + [[1], [2, 1], [3, 1], [4, 3], [5, 4], [6, 2, 4]], + times=[2, 3, 4, 5, 1, 6], ) self.assertWalkYields([m6, y4, y3, x2, c1], [m6.id]) # Ensure that c1..y4 get excluded even though they're popped from the # priority queue long before y5. self.assertWalkYields([m6, x2], [m6.id], exclude=[y5.id]) def test_empty_walk(self): c1, c2, c3 = self.make_linear_commits(3) self.assertWalkYields([], [c3.id], exclude=[c3.id]) class WalkEntryTest(TestCase): def setUp(self): super(WalkEntryTest, self).setUp() self.store = MemoryObjectStore() def make_commits(self, commit_spec, **kwargs): times = kwargs.pop("times", []) attrs = kwargs.pop("attrs", {}) for i, t in enumerate(times): attrs.setdefault(i + 1, {})["commit_time"] = t return build_commit_graph(self.store, commit_spec, attrs=attrs, **kwargs) def make_linear_commits(self, num_commits, **kwargs): commit_spec = [] for i in range(1, num_commits + 1): c = [i] if i > 1: c.append(i - 1) commit_spec.append(c) return self.make_commits(commit_spec, **kwargs) def test_all_changes(self): # Construct a commit with 2 files in different subdirectories. blob_a = make_object(Blob, data=b"a") blob_b = make_object(Blob, data=b"b") c1 = self.make_linear_commits( 1, trees={1: [(b"x/a", blob_a), (b"y/b", blob_b)]}, )[0] # Get the WalkEntry for the commit. walker = Walker(self.store, c1.id) walker_entry = list(walker)[0] changes = walker_entry.changes() # Compare the changes with the expected values. entry_a = (b"x/a", F, blob_a.id) entry_b = (b"y/b", F, blob_b.id) self.assertEqual( [TreeChange.add(entry_a), TreeChange.add(entry_b)], changes, ) def test_all_with_merge(self): blob_a = make_object(Blob, data=b"a") blob_a2 = make_object(Blob, data=b"a2") blob_b = make_object(Blob, data=b"b") blob_b2 = make_object(Blob, data=b"b2") x1, y2, m3 = self.make_commits( [[1], [2], [3, 1, 2]], trees={ 1: [(b"x/a", blob_a)], 2: [(b"y/b", blob_b)], 3: [(b"x/a", blob_a2), (b"y/b", blob_b2)], }, ) # Get the WalkEntry for the merge commit. walker = Walker(self.store, m3.id) entries = list(walker) walker_entry = entries[0] self.assertEqual(walker_entry.commit.id, m3.id) changes = walker_entry.changes() self.assertEqual(2, len(changes)) entry_a = (b"x/a", F, blob_a.id) entry_a2 = (b"x/a", F, blob_a2.id) entry_b = (b"y/b", F, blob_b.id) entry_b2 = (b"y/b", F, blob_b2.id) self.assertEqual( [ [ TreeChange(CHANGE_MODIFY, entry_a, entry_a2), TreeChange.add(entry_a2), ], [ TreeChange.add(entry_b2), TreeChange(CHANGE_MODIFY, entry_b, entry_b2), ], ], changes, ) def test_filter_changes(self): # Construct a commit with 2 files in different subdirectories. 
blob_a = make_object(Blob, data=b"a") blob_b = make_object(Blob, data=b"b") c1 = self.make_linear_commits( 1, trees={1: [(b"x/a", blob_a), (b"y/b", blob_b)]}, )[0] # Get the WalkEntry for the commit. walker = Walker(self.store, c1.id) walker_entry = list(walker)[0] changes = walker_entry.changes(path_prefix=b"x") # Compare the changes with the expected values. entry_a = (b"a", F, blob_a.id) self.assertEqual( [TreeChange.add(entry_a)], changes, ) def test_filter_with_merge(self): blob_a = make_object(Blob, data=b"a") blob_a2 = make_object(Blob, data=b"a2") blob_b = make_object(Blob, data=b"b") blob_b2 = make_object(Blob, data=b"b2") x1, y2, m3 = self.make_commits( [[1], [2], [3, 1, 2]], trees={ 1: [(b"x/a", blob_a)], 2: [(b"y/b", blob_b)], 3: [(b"x/a", blob_a2), (b"y/b", blob_b2)], }, ) # Get the WalkEntry for the merge commit. walker = Walker(self.store, m3.id) entries = list(walker) walker_entry = entries[0] self.assertEqual(walker_entry.commit.id, m3.id) changes = walker_entry.changes(b"x") self.assertEqual(1, len(changes)) entry_a = (b"a", F, blob_a.id) entry_a2 = (b"a", F, blob_a2.id) self.assertEqual( [[TreeChange(CHANGE_MODIFY, entry_a, entry_a2)]], changes, ) diff --git a/dulwich/tests/test_web.py b/dulwich/tests/test_web.py index 0f9c4f58..6541c7c1 100644 --- a/dulwich/tests/test_web.py +++ b/dulwich/tests/test_web.py @@ -1,581 +1,590 @@ # test_web.py -- Tests for the git HTTP server # Copyright (C) 2010 Google, Inc. # # Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU # General Public License as public by the Free Software Foundation; version 2.0 # or (at your option) any later version. You can redistribute it and/or # modify it under the terms of either of these two licenses. # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # You should have received a copy of the licenses; if not, see # for a copy of the GNU General Public License # and for a copy of the Apache # License, Version 2.0. 
# """Tests for the Git HTTP server.""" from io import BytesIO import gzip import re import os from typing import Type from dulwich.object_store import ( MemoryObjectStore, ) from dulwich.objects import ( Blob, ) from dulwich.repo import ( BaseRepo, MemoryRepo, ) from dulwich.server import ( DictBackend, ) from dulwich.tests import ( TestCase, ) from dulwich.web import ( HTTP_OK, HTTP_NOT_FOUND, HTTP_FORBIDDEN, HTTP_ERROR, GunzipFilter, send_file, get_text_file, get_loose_object, get_pack_file, get_idx_file, get_info_refs, get_info_packs, handle_service_request, _LengthLimitedFile, HTTPGitRequest, HTTPGitApplication, ) from dulwich.tests.utils import ( make_object, make_tag, ) class MinimalistWSGIInputStream(object): """WSGI input stream with no 'seek()' and 'tell()' methods.""" def __init__(self, data): self.data = data self.pos = 0 def read(self, howmuch): start = self.pos end = self.pos + howmuch if start >= len(self.data): return "" self.pos = end return self.data[start:end] class MinimalistWSGIInputStream2(MinimalistWSGIInputStream): """WSGI input stream with no *working* 'seek()' and 'tell()' methods.""" def seek(self, pos): raise NotImplementedError def tell(self): raise NotImplementedError class TestHTTPGitRequest(HTTPGitRequest): """HTTPGitRequest with overridden methods to help test caching.""" def __init__(self, *args, **kwargs): HTTPGitRequest.__init__(self, *args, **kwargs) self.cached = None def nocache(self): self.cached = False def cache_forever(self): self.cached = True class WebTestCase(TestCase): """Base TestCase with useful instance vars and utility functions.""" _req_class = TestHTTPGitRequest # type: Type[HTTPGitRequest] def setUp(self): super(WebTestCase, self).setUp() self._environ = {} self._req = self._req_class( self._environ, self._start_response, handlers=self._handlers() ) self._status = None self._headers = [] self._output = BytesIO() def _start_response(self, status, headers): self._status = status self._headers = list(headers) return self._output.write def _handlers(self): return None def assertContentTypeEquals(self, expected): self.assertTrue(("Content-Type", expected) in self._headers) def _test_backend(objects, refs=None, named_files=None): if not refs: refs = {} if not named_files: named_files = {} repo = MemoryRepo.init_bare(objects, refs) for path, contents in named_files.items(): repo._put_named_file(path, contents) return DictBackend({"/": repo}) class DumbHandlersTestCase(WebTestCase): def test_send_file_not_found(self): list(send_file(self._req, None, "text/plain")) self.assertEqual(HTTP_NOT_FOUND, self._status) def test_send_file(self): f = BytesIO(b"foobar") output = b"".join(send_file(self._req, f, "some/thing")) self.assertEqual(b"foobar", output) self.assertEqual(HTTP_OK, self._status) self.assertContentTypeEquals("some/thing") self.assertTrue(f.closed) def test_send_file_buffered(self): bufsize = 10240 xs = b"x" * bufsize f = BytesIO(2 * xs) self.assertEqual([xs, xs], list(send_file(self._req, f, "some/thing"))) self.assertEqual(HTTP_OK, self._status) self.assertContentTypeEquals("some/thing") self.assertTrue(f.closed) def test_send_file_error(self): class TestFile(object): def __init__(self, exc_class): self.closed = False self._exc_class = exc_class def read(self, size=-1): raise self._exc_class() def close(self): self.closed = True f = TestFile(IOError) list(send_file(self._req, f, "some/thing")) self.assertEqual(HTTP_ERROR, self._status) self.assertTrue(f.closed) self.assertFalse(self._req.cached) # non-IOErrors are reraised f = 
TestFile(AttributeError) self.assertRaises(AttributeError, list, send_file(self._req, f, "some/thing")) self.assertTrue(f.closed) self.assertFalse(self._req.cached) def test_get_text_file(self): backend = _test_backend([], named_files={"description": b"foo"}) mat = re.search(".*", "description") output = b"".join(get_text_file(self._req, backend, mat)) self.assertEqual(b"foo", output) self.assertEqual(HTTP_OK, self._status) self.assertContentTypeEquals("text/plain") self.assertFalse(self._req.cached) def test_get_loose_object(self): blob = make_object(Blob, data=b"foo") backend = _test_backend([blob]) mat = re.search("^(..)(.{38})$", blob.id.decode("ascii")) output = b"".join(get_loose_object(self._req, backend, mat)) self.assertEqual(blob.as_legacy_object(), output) self.assertEqual(HTTP_OK, self._status) self.assertContentTypeEquals("application/x-git-loose-object") self.assertTrue(self._req.cached) def test_get_loose_object_missing(self): mat = re.search("^(..)(.{38})$", "1" * 40) list(get_loose_object(self._req, _test_backend([]), mat)) self.assertEqual(HTTP_NOT_FOUND, self._status) def test_get_loose_object_error(self): blob = make_object(Blob, data=b"foo") backend = _test_backend([blob]) mat = re.search("^(..)(.{38})$", blob.id.decode("ascii")) def as_legacy_object_error(self): raise IOError self.addCleanup(setattr, Blob, "as_legacy_object", Blob.as_legacy_object) Blob.as_legacy_object = as_legacy_object_error list(get_loose_object(self._req, backend, mat)) self.assertEqual(HTTP_ERROR, self._status) def test_get_pack_file(self): pack_name = os.path.join("objects", "pack", "pack-%s.pack" % ("1" * 40)) backend = _test_backend([], named_files={pack_name: b"pack contents"}) mat = re.search(".*", pack_name) output = b"".join(get_pack_file(self._req, backend, mat)) self.assertEqual(b"pack contents", output) self.assertEqual(HTTP_OK, self._status) self.assertContentTypeEquals("application/x-git-packed-objects") self.assertTrue(self._req.cached) def test_get_idx_file(self): idx_name = os.path.join("objects", "pack", "pack-%s.idx" % ("1" * 40)) backend = _test_backend([], named_files={idx_name: b"idx contents"}) mat = re.search(".*", idx_name) output = b"".join(get_idx_file(self._req, backend, mat)) self.assertEqual(b"idx contents", output) self.assertEqual(HTTP_OK, self._status) self.assertContentTypeEquals("application/x-git-packed-objects-toc") self.assertTrue(self._req.cached) def test_get_info_refs(self): self._environ["QUERY_STRING"] = "" blob1 = make_object(Blob, data=b"1") blob2 = make_object(Blob, data=b"2") blob3 = make_object(Blob, data=b"3") tag1 = make_tag(blob2, name=b"tag-tag") objects = [blob1, blob2, blob3, tag1] refs = { b"HEAD": b"000", b"refs/heads/master": blob1.id, b"refs/tags/tag-tag": tag1.id, b"refs/tags/blob-tag": blob3.id, } backend = _test_backend(objects, refs=refs) mat = re.search(".*", "//info/refs") self.assertEqual( [ blob1.id + b"\trefs/heads/master\n", blob3.id + b"\trefs/tags/blob-tag\n", tag1.id + b"\trefs/tags/tag-tag\n", blob2.id + b"\trefs/tags/tag-tag^{}\n", ], list(get_info_refs(self._req, backend, mat)), ) self.assertEqual(HTTP_OK, self._status) self.assertContentTypeEquals("text/plain") self.assertFalse(self._req.cached) def test_get_info_refs_not_found(self): self._environ["QUERY_STRING"] = "" objects = [] refs = {} backend = _test_backend(objects, refs=refs) mat = re.search("info/refs", "/foo/info/refs") self.assertEqual( [b"No git repository was found at /foo"], list(get_info_refs(self._req, backend, mat)), ) self.assertEqual(HTTP_NOT_FOUND, 
self._status) self.assertContentTypeEquals("text/plain") def test_get_info_packs(self): class TestPackData(object): def __init__(self, sha): self.filename = "pack-%s.pack" % sha class TestPack(object): def __init__(self, sha): self.data = TestPackData(sha) packs = [TestPack(str(i) * 40) for i in range(1, 4)] class TestObjectStore(MemoryObjectStore): # property must be overridden, can't be assigned @property def packs(self): return packs store = TestObjectStore() repo = BaseRepo(store, None) backend = DictBackend({"/": repo}) mat = re.search(".*", "//info/packs") output = b"".join(get_info_packs(self._req, backend, mat)) expected = b"".join( [(b"P pack-" + s + b".pack\n") for s in [b"1" * 40, b"2" * 40, b"3" * 40]] ) self.assertEqual(expected, output) self.assertEqual(HTTP_OK, self._status) self.assertContentTypeEquals("text/plain") self.assertFalse(self._req.cached) class SmartHandlersTestCase(WebTestCase): class _TestUploadPackHandler(object): def __init__( - self, backend, args, proto, stateless_rpc=None, advertise_refs=False + self, + backend, + args, + proto, + stateless_rpc=None, + advertise_refs=False, ): self.args = args self.proto = proto self.stateless_rpc = stateless_rpc self.advertise_refs = advertise_refs def handle(self): self.proto.write(b"handled input: " + self.proto.recv(1024)) def _make_handler(self, *args, **kwargs): self._handler = self._TestUploadPackHandler(*args, **kwargs) return self._handler def _handlers(self): return {b"git-upload-pack": self._make_handler} def test_handle_service_request_unknown(self): mat = re.search(".*", "/git-evil-handler") content = list(handle_service_request(self._req, "backend", mat)) self.assertEqual(HTTP_FORBIDDEN, self._status) self.assertFalse(b"git-evil-handler" in b"".join(content)) self.assertFalse(self._req.cached) def _run_handle_service_request(self, content_length=None): self._environ["wsgi.input"] = BytesIO(b"foo") if content_length is not None: self._environ["CONTENT_LENGTH"] = content_length mat = re.search(".*", "/git-upload-pack") class Backend(object): def open_repository(self, path): return None handler_output = b"".join(handle_service_request(self._req, Backend(), mat)) write_output = self._output.getvalue() # Ensure all output was written via the write callback. 
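# Illustrative aside: smart-HTTP handlers stream their response through
# the write callable returned by req.respond(...) rather than through the
# WSGI body iterator, so the drained iterator (handler_output) should be
# empty while self._output holds the actual payload: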
self.assertEqual(b"", handler_output) self.assertEqual(b"handled input: foo", write_output) self.assertContentTypeEquals("application/x-git-upload-pack-result") self.assertFalse(self._handler.advertise_refs) self.assertTrue(self._handler.stateless_rpc) self.assertFalse(self._req.cached) def test_handle_service_request(self): self._run_handle_service_request() def test_handle_service_request_with_length(self): self._run_handle_service_request(content_length="3") def test_handle_service_request_empty_length(self): self._run_handle_service_request(content_length="") def test_get_info_refs_unknown(self): self._environ["QUERY_STRING"] = "service=git-evil-handler" class Backend(object): def open_repository(self, url): return None mat = re.search(".*", "/git-evil-pack") content = list(get_info_refs(self._req, Backend(), mat)) self.assertFalse(b"git-evil-handler" in b"".join(content)) self.assertEqual(HTTP_FORBIDDEN, self._status) self.assertFalse(self._req.cached) def test_get_info_refs(self): self._environ["wsgi.input"] = BytesIO(b"foo") self._environ["QUERY_STRING"] = "service=git-upload-pack" class Backend(object): def open_repository(self, url): return None mat = re.search(".*", "/git-upload-pack") handler_output = b"".join(get_info_refs(self._req, Backend(), mat)) write_output = self._output.getvalue() self.assertEqual( ( b"001e# service=git-upload-pack\n" b"0000" # input is ignored by the handler b"handled input: " ), write_output, ) # Ensure all output was written via the write callback. self.assertEqual(b"", handler_output) self.assertTrue(self._handler.advertise_refs) self.assertTrue(self._handler.stateless_rpc) self.assertFalse(self._req.cached) class LengthLimitedFileTestCase(TestCase): def test_no_cutoff(self): f = _LengthLimitedFile(BytesIO(b"foobar"), 1024) self.assertEqual(b"foobar", f.read()) def test_cutoff(self): f = _LengthLimitedFile(BytesIO(b"foobar"), 3) self.assertEqual(b"foo", f.read()) self.assertEqual(b"", f.read()) def test_multiple_reads(self): f = _LengthLimitedFile(BytesIO(b"foobar"), 3) self.assertEqual(b"fo", f.read(2)) self.assertEqual(b"o", f.read(2)) self.assertEqual(b"", f.read()) class HTTPGitRequestTestCase(WebTestCase): # This class tests the contents of the actual cache headers _req_class = HTTPGitRequest def test_not_found(self): self._req.cache_forever() # cache headers should be discarded message = "Something not found" self.assertEqual(message.encode("ascii"), self._req.not_found(message)) self.assertEqual(HTTP_NOT_FOUND, self._status) self.assertEqual(set([("Content-Type", "text/plain")]), set(self._headers)) def test_forbidden(self): self._req.cache_forever() # cache headers should be discarded message = "Something not found" self.assertEqual(message.encode("ascii"), self._req.forbidden(message)) self.assertEqual(HTTP_FORBIDDEN, self._status) self.assertEqual(set([("Content-Type", "text/plain")]), set(self._headers)) def test_respond_ok(self): self._req.respond() self.assertEqual([], self._headers) self.assertEqual(HTTP_OK, self._status) def test_respond(self): self._req.nocache() self._req.respond( status=402, content_type="some/type", headers=[("X-Foo", "foo"), ("X-Bar", "bar")], ) self.assertEqual( set( [ ("X-Foo", "foo"), ("X-Bar", "bar"), ("Content-Type", "some/type"), ("Expires", "Fri, 01 Jan 1980 00:00:00 GMT"), ("Pragma", "no-cache"), ("Cache-Control", "no-cache, max-age=0, must-revalidate"), ] ), set(self._headers), ) self.assertEqual(402, self._status) class HTTPGitApplicationTestCase(TestCase): def setUp(self): 
super(HTTPGitApplicationTestCase, self).setUp() self._app = HTTPGitApplication("backend") self._environ = { "PATH_INFO": "/foo", "REQUEST_METHOD": "GET", } def _test_handler(self, req, backend, mat): # tests interface used by all handlers self.assertEqual(self._environ, req.environ) self.assertEqual("backend", backend) self.assertEqual("/foo", mat.group(0)) return "output" def _add_handler(self, app): req = self._environ["REQUEST_METHOD"] app.services = { (req, re.compile("/foo$")): self._test_handler, } def test_call(self): self._add_handler(self._app) self.assertEqual("output", self._app(self._environ, None)) def test_fallback_app(self): def test_app(environ, start_response): return "output" app = HTTPGitApplication("backend", fallback_app=test_app) self.assertEqual("output", app(self._environ, None)) class GunzipTestCase(HTTPGitApplicationTestCase): __doc__ = """TestCase for testing the GunzipFilter, ensuring the wsgi.input is correctly decompressed and headers are corrected. """ example_text = __doc__.encode("ascii") def setUp(self): super(GunzipTestCase, self).setUp() self._app = GunzipFilter(self._app) self._environ["HTTP_CONTENT_ENCODING"] = "gzip" self._environ["REQUEST_METHOD"] = "POST" def _get_zstream(self, text): zstream = BytesIO() zfile = gzip.GzipFile(fileobj=zstream, mode="w") zfile.write(text) zfile.close() zlength = zstream.tell() zstream.seek(0) return zstream, zlength def _test_call(self, orig, zstream, zlength): self._add_handler(self._app.app) self.assertLess(zlength, len(orig)) self.assertEqual(self._environ["HTTP_CONTENT_ENCODING"], "gzip") self._environ["CONTENT_LENGTH"] = zlength self._environ["wsgi.input"] = zstream self._app(self._environ, None) buf = self._environ["wsgi.input"] self.assertIsNot(buf, zstream) buf.seek(0) self.assertEqual(orig, buf.read()) self.assertIs(None, self._environ.get("CONTENT_LENGTH")) self.assertNotIn("HTTP_CONTENT_ENCODING", self._environ) def test_call(self): self._test_call(self.example_text, *self._get_zstream(self.example_text)) def test_call_no_seek(self): """ This ensures that the gunzipping code doesn't require any methods on 'wsgi.input' except for '.read()'. (In particular, it shouldn't require '.seek()'. See https://github.com/jelmer/dulwich/issues/140.) """ zstream, zlength = self._get_zstream(self.example_text) self._test_call( - self.example_text, MinimalistWSGIInputStream(zstream.read()), zlength + self.example_text, + MinimalistWSGIInputStream(zstream.read()), + zlength, ) def test_call_no_working_seek(self): """ Similar to 'test_call_no_seek', but this time the methods are available (but defunct). See https://github.com/jonashaag/klaus/issues/154. """ zstream, zlength = self._get_zstream(self.example_text) self._test_call( - self.example_text, MinimalistWSGIInputStream2(zstream.read()), zlength + self.example_text, + MinimalistWSGIInputStream2(zstream.read()), + zlength, ) diff --git a/dulwich/tests/utils.py b/dulwich/tests/utils.py index e7245cfb..09f92cb3 100644 --- a/dulwich/tests/utils.py +++ b/dulwich/tests/utils.py @@ -1,374 +1,378 @@ # utils.py -- Test utilities for Dulwich. # Copyright (C) 2010 Google, Inc. # # Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU # General Public License as public by the Free Software Foundation; version 2.0 # or (at your option) any later version. You can redistribute it and/or # modify it under the terms of either of these two licenses. 
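# Illustrative aside (not part of the file header): GunzipTestCase above
# builds its compressed request body the way any HTTP client might:
#
#     import gzip, io
#     zstream = io.BytesIO()
#     with gzip.GzipFile(fileobj=zstream, mode="w") as zfile:
#         zfile.write(b"payload")
#     zlength = zstream.tell()
#     zstream.seek(0)
#
# GunzipFilter then swaps environ["wsgi.input"] for a decompressed buffer
# and drops CONTENT_LENGTH and HTTP_CONTENT_ENCODING, which is exactly
# what _test_call asserts.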
# # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # You should have received a copy of the licenses; if not, see # for a copy of the GNU General Public License # and for a copy of the Apache # License, Version 2.0. # """Utility functions common to Dulwich tests.""" import datetime import os import shutil import tempfile import time import types import warnings from dulwich.index import ( commit_tree, ) from dulwich.objects import ( FixedSha, Commit, Tag, object_class, ) from dulwich.pack import ( OFS_DELTA, REF_DELTA, DELTA_TYPES, obj_sha, SHA1Writer, write_pack_header, write_pack_object, create_delta, ) from dulwich.repo import Repo from dulwich.tests import ( # noqa: F401 skipIf, SkipTest, ) # Plain files are very frequently used in tests, so let the mode be very short. F = 0o100644 # Shorthand mode for Files. def open_repo(name, temp_dir=None): """Open a copy of a repo in a temporary directory. Use this function for accessing repos in dulwich/tests/data/repos to avoid accidentally or intentionally modifying those repos in place. Use tear_down_repo to delete any temp files created. Args: name: The name of the repository, relative to dulwich/tests/data/repos temp_dir: temporary directory to initialize to. If not provided, a temporary directory will be created. Returns: An initialized Repo object that lives in a temporary directory. """ if temp_dir is None: temp_dir = tempfile.mkdtemp() repo_dir = os.path.join(os.path.dirname(__file__), "data", "repos", name) temp_repo_dir = os.path.join(temp_dir, name) shutil.copytree(repo_dir, temp_repo_dir, symlinks=True) return Repo(temp_repo_dir) def tear_down_repo(repo): """Tear down a test repository.""" repo.close() temp_dir = os.path.dirname(repo.path.rstrip(os.sep)) shutil.rmtree(temp_dir) def make_object(cls, **attrs): """Make an object for testing and assign some members. This method creates a new subclass to allow arbitrary attribute reassignment, which is not otherwise possible with objects having __slots__. Args: attrs: dict of attributes to set on the new object. Returns: A newly initialized object of type cls. """ class TestObject(cls): """Class that inherits from the given class, but without __slots__. Note that classes with __slots__ can't have arbitrary attributes monkey-patched in, so this is a class that is exactly the same only with a __dict__ instead of __slots__. """ pass TestObject.__name__ = "TestObject_" + cls.__name__ obj = TestObject() for name, value in attrs.items(): if name == "id": # id property is read-only, so we overwrite sha instead. sha = FixedSha(value) obj.sha = lambda: sha else: setattr(obj, name, value) return obj def make_commit(**attrs): """Make a Commit object with a default set of members. Args: attrs: dict of attributes to overwrite from the default values. Returns: A newly initialized Commit object. """ default_time = 1262304000 # 2010-01-01 00:00:00 all_attrs = { "author": b"Test Author ", "author_time": default_time, "author_timezone": 0, "committer": b"Test Committer ", "commit_time": default_time, "commit_timezone": 0, "message": b"Test message.", "parents": [], "tree": b"0" * 40, } all_attrs.update(attrs) return make_object(Commit, **all_attrs) def make_tag(target, **attrs): """Make a Tag object with a default set of values. 
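
    Example (illustrative):

        tag = make_tag(commit, name=b"v1.0")

    returns a Tag whose object field points at the given commit, with the
    remaining fields filled from the defaults below.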
Args: target: object to be tagged (Commit, Blob, Tree, etc) attrs: dict of attributes to overwrite from the default values. Returns: A newly initialized Tag object. """ target_id = target.id target_type = object_class(target.type_name) default_time = int(time.mktime(datetime.datetime(2010, 1, 1).timetuple())) all_attrs = { "tagger": b"Test Author ", "tag_time": default_time, "tag_timezone": 0, "message": b"Test message.", "object": (target_type, target_id), "name": b"Test Tag", } all_attrs.update(attrs) return make_object(Tag, **all_attrs) def functest_builder(method, func): """Generate a test method that tests the given function.""" def do_test(self): method(self, func) return do_test def ext_functest_builder(method, func): """Generate a test method that tests the given extension function. This is intended to generate test methods that test both a pure-Python version and an extension version using common test code. The extension test will raise SkipTest if the extension is not found. Sample usage: class MyTest(TestCase); def _do_some_test(self, func_impl): self.assertEqual('foo', func_impl()) test_foo = functest_builder(_do_some_test, foo_py) test_foo_extension = ext_functest_builder(_do_some_test, _foo_c) Args: method: The method to run. It must must two parameters, self and the function implementation to test. func: The function implementation to pass to method. """ def do_test(self): if not isinstance(func, types.BuiltinFunctionType): raise SkipTest("%s extension not found" % func) method(self, func) return do_test def build_pack(f, objects_spec, store=None): """Write test pack data from a concise spec. Args: f: A file-like object to write the pack to. objects_spec: A list of (type_num, obj). For non-delta types, obj is the string of that object's data. For delta types, obj is a tuple of (base, data), where: * base can be either an index in objects_spec of the base for that * delta; or for a ref delta, a SHA, in which case the resulting pack * will be thin and the base will be an external ref. * data is a string of the full, non-deltified data for that object. Note that offsets/refs and deltas are computed within this function. store: An optional ObjectStore for looking up external refs. 
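
    Example spec (illustrative): a blob followed by an offset delta whose
    base is that blob:

        objects_spec = [
            (Blob.type_num, b"blob data"),
            (OFS_DELTA, (0, b"new data")),
        ]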
Returns: A list of tuples in the order specified by objects_spec: (offset, type num, data, sha, CRC32) """ sf = SHA1Writer(f) num_objects = len(objects_spec) write_pack_header(sf, num_objects) full_objects = {} offsets = {} crc32s = {} while len(full_objects) < num_objects: for i, (type_num, data) in enumerate(objects_spec): if type_num not in DELTA_TYPES: full_objects[i] = (type_num, data, obj_sha(type_num, [data])) continue base, data = data if isinstance(base, int): if base not in full_objects: continue base_type_num, _, _ = full_objects[base] else: base_type_num, _ = store.get_raw(base) - full_objects[i] = (base_type_num, data, obj_sha(base_type_num, [data])) + full_objects[i] = ( + base_type_num, + data, + obj_sha(base_type_num, [data]), + ) for i, (type_num, obj) in enumerate(objects_spec): offset = f.tell() if type_num == OFS_DELTA: base_index, data = obj base = offset - offsets[base_index] _, base_data, _ = full_objects[base_index] obj = (base, create_delta(base_data, data)) elif type_num == REF_DELTA: base_ref, data = obj if isinstance(base_ref, int): _, base_data, base = full_objects[base_ref] else: base_type_num, base_data = store.get_raw(base_ref) base = obj_sha(base_type_num, base_data) obj = (base, create_delta(base_data, data)) crc32 = write_pack_object(sf, type_num, obj) offsets[i] = offset crc32s[i] = crc32 expected = [] for i in range(num_objects): type_num, data, sha = full_objects[i] assert len(sha) == 20 expected.append((offsets[i], type_num, data, sha, crc32s[i])) sf.write_sha() f.seek(0) return expected def build_commit_graph(object_store, commit_spec, trees=None, attrs=None): """Build a commit graph from a concise specification. Sample usage: >>> c1, c2, c3 = build_commit_graph(store, [[1], [2, 1], [3, 1, 2]]) >>> store[store[c3].parents[0]] == c1 True >>> store[store[c3].parents[1]] == c2 True If not otherwise specified, commits will refer to the empty tree and have commit times increasing in the same order as the commit spec. Args: object_store: An ObjectStore to commit objects to. commit_spec: An iterable of iterables of ints defining the commit graph. Each entry defines one commit, and entries must be in topological order. The first element of each entry is a commit number, and the remaining elements are its parents. The commit numbers are only meaningful for the call to make_commits; since real commit objects are created, they will get created with real, opaque SHAs. trees: An optional dict of commit number -> tree spec for building trees for commits. The tree spec is an iterable of (path, blob, mode) or (path, blob) entries; if mode is omitted, it defaults to the normal file mode (0100644). attrs: A dict of commit number -> (dict of attribute -> value) for assigning additional values to the commits. Returns: The list of commit objects created. Raises: ValueError: If an undefined commit identifier is listed as a parent. 
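
    A fuller example (illustrative), exercising trees and attrs with a
    hypothetical blob object:

        c1, c2 = build_commit_graph(
            store,
            [[1], [2, 1]],
            trees={1: [(b"a", blob)]},
            attrs={2: {"commit_time": 50}},
        )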
""" if trees is None: trees = {} if attrs is None: attrs = {} commit_time = 0 nums = {} commits = [] for commit in commit_spec: commit_num = commit[0] try: parent_ids = [nums[pn] for pn in commit[1:]] except KeyError as e: (missing_parent,) = e.args raise ValueError("Unknown parent %i" % missing_parent) blobs = [] for entry in trees.get(commit_num, []): if len(entry) == 2: path, blob = entry entry = (path, blob, F) path, blob, mode = entry blobs.append((path, blob.id, mode)) object_store.add_object(blob) tree_id = commit_tree(object_store, blobs) commit_attrs = { "message": ("Commit %i" % commit_num).encode("ascii"), "parents": parent_ids, "tree": tree_id, "commit_time": commit_time, } commit_attrs.update(attrs.get(commit_num, {})) commit_obj = make_commit(**commit_attrs) # By default, increment the time by a lot. Out-of-order commits should # be closer together than this because their main cause is clock skew. commit_time = commit_attrs["commit_time"] + 100 nums[commit_num] = commit_obj.id object_store.add_object(commit_obj) commits.append(commit_obj) return commits def setup_warning_catcher(): """Wrap warnings.showwarning with code that records warnings.""" caught_warnings = [] original_showwarning = warnings.showwarning def custom_showwarning(*args, **kwargs): caught_warnings.append(args[0]) warnings.showwarning = custom_showwarning def restore_showwarning(): warnings.showwarning = original_showwarning return caught_warnings, restore_showwarning diff --git a/dulwich/walk.py b/dulwich/walk.py index faae59e3..40a0303f 100644 --- a/dulwich/walk.py +++ b/dulwich/walk.py @@ -1,433 +1,436 @@ # walk.py -- General implementation of walking commits and their contents. # Copyright (C) 2010 Google, Inc. # # Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU # General Public License as public by the Free Software Foundation; version 2.0 # or (at your option) any later version. You can redistribute it and/or # modify it under the terms of either of these two licenses. # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # You should have received a copy of the licenses; if not, see # for a copy of the GNU General Public License # and for a copy of the Apache # License, Version 2.0. # """General implementation of walking commits and their contents.""" import collections import heapq from itertools import chain from dulwich.diff_tree import ( RENAME_CHANGE_TYPES, tree_changes, tree_changes_for_merge, RenameDetector, ) from dulwich.errors import ( MissingCommitError, ) from dulwich.objects import ( Tag, ) ORDER_DATE = "date" ORDER_TOPO = "topo" ALL_ORDERS = (ORDER_DATE, ORDER_TOPO) # Maximum number of commits to walk past a commit time boundary. _MAX_EXTRA_COMMITS = 5 class WalkEntry(object): """Object encapsulating a single result from a walk.""" def __init__(self, walker, commit): self.commit = commit self._store = walker.store self._get_parents = walker.get_parents self._changes = {} self._rename_detector = walker.rename_detector def changes(self, path_prefix=None): """Get the tree changes for this entry. Args: path_prefix: Portion of the path in the repository to use to filter changes. Must be a directory name. Must be a full, valid, path reference (no partial names or wildcards). 
Returns: For commits with up to one parent, a list of TreeChange objects; if the commit has no parents, these will be relative to the empty tree. For merge commits, a list of lists of TreeChange objects; see dulwich.diff.tree_changes_for_merge. """ cached = self._changes.get(path_prefix) if cached is None: commit = self.commit if not self._get_parents(commit): changes_func = tree_changes parent = None elif len(self._get_parents(commit)) == 1: changes_func = tree_changes parent = self._store[self._get_parents(commit)[0]].tree if path_prefix: mode, subtree_sha = parent.lookup_path( self._store.__getitem__, path_prefix, ) parent = self._store[subtree_sha] else: changes_func = tree_changes_for_merge parent = [self._store[p].tree for p in self._get_parents(commit)] if path_prefix: parent_trees = [self._store[p] for p in parent] parent = [] for p in parent_trees: try: mode, st = p.lookup_path( self._store.__getitem__, path_prefix, ) except KeyError: pass else: parent.append(st) commit_tree_sha = commit.tree if path_prefix: commit_tree = self._store[commit_tree_sha] mode, commit_tree_sha = commit_tree.lookup_path( self._store.__getitem__, path_prefix, ) cached = list( changes_func( self._store, parent, commit_tree_sha, rename_detector=self._rename_detector, ) ) self._changes[path_prefix] = cached return self._changes[path_prefix] def __repr__(self): - return "<WalkEntry commit=%s, changes=%r>" % (self.commit.id, self.changes()) + return "<WalkEntry commit=%s, changes=%r>" % ( + self.commit.id, + self.changes(), + ) class _CommitTimeQueue(object): """Priority queue of WalkEntry objects by commit time.""" def __init__(self, walker): self._walker = walker self._store = walker.store self._get_parents = walker.get_parents self._excluded = walker.excluded self._pq = [] self._pq_set = set() self._seen = set() self._done = set() self._min_time = walker.since self._last = None self._extra_commits_left = _MAX_EXTRA_COMMITS self._is_finished = False for commit_id in chain(walker.include, walker.excluded): self._push(commit_id) def _push(self, object_id): try: obj = self._store[object_id] except KeyError: raise MissingCommitError(object_id) if isinstance(obj, Tag): self._push(obj.object[1]) return # TODO(jelmer): What to do about non-Commit and non-Tag objects? commit = obj if commit.id not in self._pq_set and commit.id not in self._done: heapq.heappush(self._pq, (-commit.commit_time, commit)) self._pq_set.add(commit.id) self._seen.add(commit.id) def _exclude_parents(self, commit): excluded = self._excluded seen = self._seen todo = [commit] while todo: commit = todo.pop() for parent in self._get_parents(commit): if parent not in excluded and parent in seen: # TODO: This is inefficient unless the object store does # some caching (which DiskObjectStore currently does not). # We could either add caching in this class or pass around # parsed queue entry objects instead of commits.
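# Illustrative aside: exclusion spreads breadth-first from each excluded
# commit to every ancestor that has already been seen, which is what
# test_out_of_order_with_exclude relies on (exclude=[y5.id] eventually
# covers y4, y3, ...). The traversal continues below by enqueueing the
# parent commit object: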
todo.append(self._store[parent]) excluded.add(parent) def next(self): if self._is_finished: return None while self._pq: _, commit = heapq.heappop(self._pq) sha = commit.id self._pq_set.remove(sha) if sha in self._done: continue self._done.add(sha) for parent_id in self._get_parents(commit): self._push(parent_id) reset_extra_commits = True is_excluded = sha in self._excluded if is_excluded: self._exclude_parents(commit) if self._pq and all(c.id in self._excluded for _, c in self._pq): _, n = self._pq[0] if self._last and n.commit_time >= self._last.commit_time: # If the next commit is newer than the last one, we # need to keep walking in case its parents (which we # may not have seen yet) are excluded. This gives the # excluded set a chance to "catch up" while the commit # is still in the Walker's output queue. reset_extra_commits = True else: reset_extra_commits = False if self._min_time is not None and commit.commit_time < self._min_time: # We want to stop walking at min_time, but commits at the # boundary may be out of order with respect to their parents. # So we walk _MAX_EXTRA_COMMITS more commits once we hit this # boundary. reset_extra_commits = False if reset_extra_commits: # We're not at a boundary, so reset the counter. self._extra_commits_left = _MAX_EXTRA_COMMITS else: self._extra_commits_left -= 1 if not self._extra_commits_left: break if not is_excluded: self._last = commit return WalkEntry(self._walker, commit) self._is_finished = True return None __next__ = next class Walker(object): """Object for performing a walk of commits in a store. Walker objects are initialized with a store and other options and can then be treated as iterators of Commit objects. """ def __init__( self, store, include, exclude=None, order=ORDER_DATE, reverse=False, max_entries=None, paths=None, rename_detector=None, follow=False, since=None, until=None, get_parents=lambda commit: commit.parents, queue_cls=_CommitTimeQueue, ): """Constructor. Args: store: ObjectStore instance for looking up objects. include: Iterable of SHAs of commits to include along with their ancestors. exclude: Iterable of SHAs of commits to exclude along with their ancestors, overriding includes. order: ORDER_* constant specifying the order of results. Anything other than ORDER_DATE may result in O(n) memory usage. reverse: If True, reverse the order of output, requiring O(n) memory. max_entries: The maximum number of entries to yield, or None for no limit. paths: Iterable of file or subtree paths to show entries for. rename_detector: diff.RenameDetector object for detecting renames. follow: If True, follow path across renames/copies. Forces a default rename_detector. since: Timestamp to list commits after. until: Timestamp to list commits before. get_parents: Method to retrieve the parents of a commit queue_cls: A class to use for a queue of commits, supporting the iterator protocol. The constructor takes a single argument, the Walker. """ # Note: when adding arguments to this method, please also update # dulwich.repo.BaseRepo.get_walker if order not in ALL_ORDERS: raise ValueError("Unknown walk order %s" % order) self.store = store if isinstance(include, bytes): # TODO(jelmer): Really, this should require a single type. # Print deprecation warning here? 
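# Illustrative aside: a bare bytes SHA is tolerated here for backwards
# compatibility and is normalized to a one-element list, so
# Walker(store, head_sha) behaves like Walker(store, [head_sha]):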
include = [include] self.include = include self.excluded = set(exclude or []) self.order = order self.reverse = reverse self.max_entries = max_entries self.paths = paths and set(paths) or None if follow and not rename_detector: rename_detector = RenameDetector(store) self.rename_detector = rename_detector self.get_parents = get_parents self.follow = follow self.since = since self.until = until self._num_entries = 0 self._queue = queue_cls(self) self._out_queue = collections.deque() def _path_matches(self, changed_path): if changed_path is None: return False for followed_path in self.paths: if changed_path == followed_path: return True if ( changed_path.startswith(followed_path) and changed_path[len(followed_path)] == b"/"[0] ): return True return False def _change_matches(self, change): if not change: return False old_path = change.old.path new_path = change.new.path if self._path_matches(new_path): if self.follow and change.type in RENAME_CHANGE_TYPES: self.paths.add(old_path) self.paths.remove(new_path) return True elif self._path_matches(old_path): return True return False def _should_return(self, entry): """Determine if a walk entry should be returned.. Args: entry: The WalkEntry to consider. Returns: True if the WalkEntry should be returned by this walk, or False otherwise (e.g. if it doesn't match any requested paths). """ commit = entry.commit if self.since is not None and commit.commit_time < self.since: return False if self.until is not None and commit.commit_time > self.until: return False if commit.id in self.excluded: return False if self.paths is None: return True if len(self.get_parents(commit)) > 1: for path_changes in entry.changes(): # For merge commits, only include changes with conflicts for # this path. Since a rename conflict may include different # old.paths, we have to check all of them. for change in path_changes: if self._change_matches(change): return True else: for change in entry.changes(): if self._change_matches(change): return True return None def _next(self): max_entries = self.max_entries while max_entries is None or self._num_entries < max_entries: entry = next(self._queue) if entry is not None: self._out_queue.append(entry) if entry is None or len(self._out_queue) > _MAX_EXTRA_COMMITS: if not self._out_queue: return None entry = self._out_queue.popleft() if self._should_return(entry): self._num_entries += 1 return entry return None def _reorder(self, results): """Possibly reorder a results iterator. Args: results: An iterator of WalkEntry objects, in the order returned from the queue_cls. Returns: An iterator or list of WalkEntry objects, in the order required by the Walker. """ if self.order == ORDER_TOPO: results = _topo_reorder(results, self.get_parents) if self.reverse: results = reversed(list(results)) return results def __iter__(self): return iter(self._reorder(iter(self._next, None))) def _topo_reorder(entries, get_parents=lambda commit: commit.parents): """Reorder an iterable of entries topologically. This works best assuming the entries are already in almost-topological order, e.g. in commit time order. Args: entries: An iterable of WalkEntry objects. get_parents: Optional function for getting the parents of a commit. Returns: iterator over WalkEntry objects from entries in FIFO order, except where a parent would be yielded before any of its children. 
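
    For example (illustrative), with a linear history c1 <- c2 <- c3 fed
    in as [c2, c1, c3], both c2 and c1 are held back until their child has
    been yielded, producing [c3, c2, c1].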
""" todo = collections.deque() pending = {} num_children = collections.defaultdict(int) for entry in entries: todo.append(entry) for p in get_parents(entry.commit): num_children[p] += 1 while todo: entry = todo.popleft() commit = entry.commit commit_id = commit.id if num_children[commit_id]: pending[commit_id] = entry continue for parent_id in get_parents(commit): num_children[parent_id] -= 1 if not num_children[parent_id]: parent_entry = pending.pop(parent_id, None) if parent_entry: todo.appendleft(parent_entry) yield entry diff --git a/dulwich/web.py b/dulwich/web.py index 472f609e..328b3913 100644 --- a/dulwich/web.py +++ b/dulwich/web.py @@ -1,559 +1,581 @@ # web.py -- WSGI smart-http server # Copyright (C) 2010 Google, Inc. # Copyright (C) 2012 Jelmer Vernooij # # Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU # General Public License as public by the Free Software Foundation; version 2.0 # or (at your option) any later version. You can redistribute it and/or # modify it under the terms of either of these two licenses. # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # You should have received a copy of the licenses; if not, see # for a copy of the GNU General Public License # and for a copy of the Apache # License, Version 2.0. # """HTTP server for dulwich that implements the git smart HTTP protocol.""" from io import BytesIO import shutil import tempfile import gzip import os import re import sys import time from typing import List, Tuple, Optional from wsgiref.simple_server import ( WSGIRequestHandler, ServerHandler, WSGIServer, make_server, ) from urllib.parse import parse_qs from dulwich import log_utils from dulwich.protocol import ( ReceivableProtocol, ) from dulwich.repo import ( BaseRepo, NotGitRepository, Repo, ) from dulwich.server import ( DictBackend, DEFAULT_HANDLERS, generate_info_refs, generate_objects_info_packs, ) logger = log_utils.getLogger(__name__) # HTTP error strings HTTP_OK = "200 OK" HTTP_NOT_FOUND = "404 Not Found" HTTP_FORBIDDEN = "403 Forbidden" HTTP_ERROR = "500 Internal Server Error" def date_time_string(timestamp: Optional[float] = None) -> str: # From BaseHTTPRequestHandler.date_time_string in BaseHTTPServer.py in the # Python 2.6.5 standard library, following modifications: # - Made a global rather than an instance method. # - weekdayname and monthname are renamed and locals rather than class # variables. # Copyright (c) 2001-2010 Python Software Foundation; All Rights Reserved weekdays = ["Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"] months = [ None, "Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec", ] if timestamp is None: timestamp = time.time() year, month, day, hh, mm, ss, wd = time.gmtime(timestamp)[:7] return "%s, %02d %3s %4d %02d:%02d:%02d GMD" % ( weekdays[wd], day, months[month], year, hh, mm, ss, ) def url_prefix(mat) -> str: """Extract the URL prefix from a regex match. Args: mat: A regex match object. Returns: The URL prefix, defined as the text before the match in the original string. Normalized to start with one leading slash and end with zero. 
""" return "/" + mat.string[: mat.start()].strip("/") def get_repo(backend, mat) -> BaseRepo: """Get a Repo instance for the given backend and URL regex match.""" return backend.open_repository(url_prefix(mat)) def send_file(req, f, content_type): """Send a file-like object to the request output. Args: req: The HTTPGitRequest object to send output to. f: An open file-like object to send; will be closed. content_type: The MIME type for the file. Returns: Iterator over the contents of the file, as chunks. """ if f is None: yield req.not_found("File not found") return try: req.respond(HTTP_OK, content_type) while True: data = f.read(10240) if not data: break yield data except IOError: yield req.error("Error reading file") finally: f.close() def _url_to_path(url): return url.replace("/", os.path.sep) def get_text_file(req, backend, mat): req.nocache() path = _url_to_path(mat.group()) logger.info("Sending plain text file %s", path) return send_file(req, get_repo(backend, mat).get_named_file(path), "text/plain") def get_loose_object(req, backend, mat): sha = (mat.group(1) + mat.group(2)).encode("ascii") logger.info("Sending loose object %s", sha) object_store = get_repo(backend, mat).object_store if not object_store.contains_loose(sha): yield req.not_found("Object not found") return try: data = object_store[sha].as_legacy_object() except IOError: yield req.error("Error reading object") return req.cache_forever() req.respond(HTTP_OK, "application/x-git-loose-object") yield data def get_pack_file(req, backend, mat): req.cache_forever() path = _url_to_path(mat.group()) logger.info("Sending pack file %s", path) return send_file( req, get_repo(backend, mat).get_named_file(path), "application/x-git-packed-objects", ) def get_idx_file(req, backend, mat): req.cache_forever() path = _url_to_path(mat.group()) logger.info("Sending pack file %s", path) return send_file( req, get_repo(backend, mat).get_named_file(path), "application/x-git-packed-objects-toc", ) def get_info_refs(req, backend, mat): params = parse_qs(req.environ["QUERY_STRING"]) service = params.get("service", [None])[0] try: repo = get_repo(backend, mat) except NotGitRepository as e: yield req.not_found(str(e)) return if service and not req.dumb: handler_cls = req.handlers.get(service.encode("ascii"), None) if handler_cls is None: yield req.forbidden("Unsupported service") return req.nocache() write = req.respond(HTTP_OK, "application/x-%s-advertisement" % service) proto = ReceivableProtocol(BytesIO().read, write) handler = handler_cls( - backend, [url_prefix(mat)], proto, stateless_rpc=req, advertise_refs=True + backend, + [url_prefix(mat)], + proto, + stateless_rpc=req, + advertise_refs=True, ) handler.proto.write_pkt_line(b"# service=" + service.encode("ascii") + b"\n") handler.proto.write_pkt_line(None) handler.handle() else: # non-smart fallback # TODO: select_getanyfile() (see http-backend.c) req.nocache() req.respond(HTTP_OK, "text/plain") logger.info("Emulating dumb info/refs") for text in generate_info_refs(repo): yield text def get_info_packs(req, backend, mat): req.nocache() req.respond(HTTP_OK, "text/plain") logger.info("Emulating dumb info/packs") return generate_objects_info_packs(get_repo(backend, mat)) class _LengthLimitedFile(object): """Wrapper class to limit the length of reads from a file-like object. This is used to ensure EOF is read from the wsgi.input object once Content-Length bytes are read. This behavior is required by the WSGI spec but not implemented in wsgiref as of 2.5. 
""" def __init__(self, input, max_bytes): self._input = input self._bytes_avail = max_bytes def read(self, size=-1): if self._bytes_avail <= 0: return b"" if size == -1 or size > self._bytes_avail: size = self._bytes_avail self._bytes_avail -= size return self._input.read(size) # TODO: support more methods as necessary def handle_service_request(req, backend, mat): service = mat.group().lstrip("/") logger.info("Handling service request for %s", service) handler_cls = req.handlers.get(service.encode("ascii"), None) if handler_cls is None: yield req.forbidden("Unsupported service") return try: get_repo(backend, mat) except NotGitRepository as e: yield req.not_found(str(e)) return req.nocache() write = req.respond(HTTP_OK, "application/x-%s-result" % service) proto = ReceivableProtocol(req.environ["wsgi.input"].read, write) # TODO(jelmer): Find a way to pass in repo, rather than having handler_cls # reopen. handler = handler_cls(backend, [url_prefix(mat)], proto, stateless_rpc=req) handler.handle() class HTTPGitRequest(object): """Class encapsulating the state of a single git HTTP request. :ivar environ: the WSGI environment for the request. """ def __init__(self, environ, start_response, dumb: bool = False, handlers=None): self.environ = environ self.dumb = dumb self.handlers = handlers self._start_response = start_response self._cache_headers = [] # type: List[Tuple[str, str]] self._headers = [] # type: List[Tuple[str, str]] def add_header(self, name, value): """Add a header to the response.""" self._headers.append((name, value)) def respond( self, status: str = HTTP_OK, content_type: Optional[str] = None, headers: Optional[List[Tuple[str, str]]] = None, ): """Begin a response with the given status and other headers.""" if headers: self._headers.extend(headers) if content_type: self._headers.append(("Content-Type", content_type)) self._headers.extend(self._cache_headers) return self._start_response(status, self._headers) def not_found(self, message: str) -> bytes: """Begin a HTTP 404 response and return the text of a message.""" self._cache_headers = [] logger.info("Not found: %s", message) self.respond(HTTP_NOT_FOUND, "text/plain") return message.encode("ascii") def forbidden(self, message: str) -> bytes: """Begin a HTTP 403 response and return the text of a message.""" self._cache_headers = [] logger.info("Forbidden: %s", message) self.respond(HTTP_FORBIDDEN, "text/plain") return message.encode("ascii") def error(self, message: str) -> bytes: """Begin a HTTP 500 response and return the text of a message.""" self._cache_headers = [] logger.error("Error: %s", message) self.respond(HTTP_ERROR, "text/plain") return message.encode("ascii") def nocache(self) -> None: """Set the response to never be cached by the client.""" self._cache_headers = [ ("Expires", "Fri, 01 Jan 1980 00:00:00 GMT"), ("Pragma", "no-cache"), ("Cache-Control", "no-cache, max-age=0, must-revalidate"), ] def cache_forever(self) -> None: """Set the response to be cached forever by the client.""" now = time.time() self._cache_headers = [ ("Date", date_time_string(now)), ("Expires", date_time_string(now + 31536000)), ("Cache-Control", "public, max-age=31536000"), ] class HTTPGitApplication(object): """Class encapsulating the state of a git WSGI application. 
class HTTPGitApplication(object):
    """Class encapsulating the state of a git WSGI application.

    :ivar backend: the Backend object backing this application
    """

    services = {
        ("GET", re.compile("/HEAD$")): get_text_file,
        ("GET", re.compile("/info/refs$")): get_info_refs,
        ("GET", re.compile("/objects/info/alternates$")): get_text_file,
        ("GET", re.compile("/objects/info/http-alternates$")): get_text_file,
        ("GET", re.compile("/objects/info/packs$")): get_info_packs,
-        ("GET", re.compile("/objects/([0-9a-f]{2})/([0-9a-f]{38})$")): get_loose_object,
-        ("GET", re.compile("/objects/pack/pack-([0-9a-f]{40})\\.pack$")): get_pack_file,
-        ("GET", re.compile("/objects/pack/pack-([0-9a-f]{40})\\.idx$")): get_idx_file,
+        (
+            "GET",
+            re.compile("/objects/([0-9a-f]{2})/([0-9a-f]{38})$"),
+        ): get_loose_object,
+        (
+            "GET",
+            re.compile("/objects/pack/pack-([0-9a-f]{40})\\.pack$"),
+        ): get_pack_file,
+        (
+            "GET",
+            re.compile("/objects/pack/pack-([0-9a-f]{40})\\.idx$"),
+        ): get_idx_file,
        ("POST", re.compile("/git-upload-pack$")): handle_service_request,
        ("POST", re.compile("/git-receive-pack$")): handle_service_request,
    }

    def __init__(self, backend, dumb: bool = False, handlers=None, fallback_app=None):
        self.backend = backend
        self.dumb = dumb
        self.handlers = dict(DEFAULT_HANDLERS)
        self.fallback_app = fallback_app
        if handlers is not None:
            self.handlers.update(handlers)

    def __call__(self, environ, start_response):
        path = environ["PATH_INFO"]
        method = environ["REQUEST_METHOD"]
        req = HTTPGitRequest(
            environ, start_response, dumb=self.dumb, handlers=self.handlers
        )
        # environ['QUERY_STRING'] has qs args
        handler = None
        for smethod, spath in self.services.keys():
            if smethod != method:
                continue
            mat = spath.search(path)
            if mat:
                handler = self.services[smethod, spath]
                break

        if handler is None:
            if self.fallback_app is not None:
                return self.fallback_app(environ, start_response)
            else:
                return [req.not_found("Sorry, that method is not supported")]

        return handler(req, self.backend, mat)


class GunzipFilter(object):
    """WSGI middleware that unzips gzip-encoded requests before
    passing on to the underlying application.
    """

    def __init__(self, application):
        self.app = application

    def __call__(self, environ, start_response):
        if environ.get("HTTP_CONTENT_ENCODING", "") == "gzip":
            try:
                environ["wsgi.input"].tell()
                wsgi_input = environ["wsgi.input"]
            except (AttributeError, IOError, NotImplementedError):
                # The gzip implementation in the standard library of Python 2.x
                # requires working '.seek()' and '.tell()' methods on the input
                # stream. Read the data into a temporary file to work around
                # this limitation.
                wsgi_input = tempfile.SpooledTemporaryFile(16 * 1024 * 1024)
                shutil.copyfileobj(environ["wsgi.input"], wsgi_input)
                wsgi_input.seek(0)

            environ["wsgi.input"] = gzip.GzipFile(
                filename=None, fileobj=wsgi_input, mode="r"
            )
            del environ["HTTP_CONTENT_ENCODING"]
            if "CONTENT_LENGTH" in environ:
                del environ["CONTENT_LENGTH"]

        return self.app(environ, start_response)


class LimitedInputFilter(object):
    """WSGI middleware that limits the input length of a request to that
    specified in Content-Length.
    """

    def __init__(self, application):
        self.app = application

    def __call__(self, environ, start_response):
        # This is not necessary if this app is run from a conforming WSGI
        # server. Unfortunately, there's no way to tell that at this point.
        # TODO: git may use HTTP/1.1 chunked encoding instead of specifying
        # content-length
        content_length = environ.get("CONTENT_LENGTH", "")
        if content_length:
            environ["wsgi.input"] = _LengthLimitedFile(
                environ["wsgi.input"], int(content_length)
            )
        return self.app(environ, start_response)


def make_wsgi_chain(*args, **kwargs):
    """Factory function to create an instance of HTTPGitApplication,
    correctly wrapped with needed middleware.
    """
    app = HTTPGitApplication(*args, **kwargs)
    wrapped_app = LimitedInputFilter(GunzipFilter(app))
    return wrapped_app

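
# Illustrative usage sketch, mirroring what main() below does; the repository
# path and port are placeholders:
#
#     from wsgiref.simple_server import make_server
#     from dulwich.repo import Repo
#     from dulwich.server import DictBackend
#
#     backend = DictBackend({"/": Repo("/path/to/repo")})
#     app = make_wsgi_chain(backend)
#     make_server("localhost", 8000, app).serve_forever()
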
class ServerHandlerLogger(ServerHandler):
    """ServerHandler that uses dulwich's logger for logging exceptions."""

    def log_exception(self, exc_info):
        logger.exception(
-            "Exception happened during processing of request", exc_info=exc_info
+            "Exception happened during processing of request",
+            exc_info=exc_info,
        )

    def log_message(self, format, *args):
        logger.info(format, *args)

    def log_error(self, *args):
        logger.error(*args)


class WSGIRequestHandlerLogger(WSGIRequestHandler):
    """WSGIRequestHandler that uses dulwich's logger for logging exceptions."""

    def log_exception(self, exc_info):
        logger.exception(
-            "Exception happened during processing of request", exc_info=exc_info
+            "Exception happened during processing of request",
+            exc_info=exc_info,
        )

    def log_message(self, format, *args):
        logger.info(format, *args)

    def log_error(self, *args):
        logger.error(*args)

    def handle(self):
        """Handle a single HTTP request"""
        self.raw_requestline = self.rfile.readline()
        if not self.parse_request():
            # An error code has been sent, just exit
            return
        handler = ServerHandlerLogger(
            self.rfile, self.wfile, self.get_stderr(), self.get_environ()
        )
        handler.request_handler = self  # backpointer for logging
        handler.run(self.server.get_app())


class WSGIServerLogger(WSGIServer):
    def handle_error(self, request, client_address):
        """Handle an error."""
        logger.exception(
            "Exception happened during processing of request from %s"
            % str(client_address)
        )


def main(argv=sys.argv):
    """Entry point for starting an HTTP git server."""
    import optparse

    parser = optparse.OptionParser()
    parser.add_option(
        "-l",
        "--listen_address",
        dest="listen_address",
        default="localhost",
        help="Binding IP address.",
    )
    parser.add_option(
-        "-p", "--port", dest="port", type=int, default=8000, help="Port to listen on."
+        "-p",
+        "--port",
+        dest="port",
+        type=int,
+        default=8000,
+        help="Port to listen on.",
    )
    options, args = parser.parse_args(argv)

    if len(args) > 1:
        gitdir = args[1]
    else:
        gitdir = os.getcwd()

    log_utils.default_logging_config()
    backend = DictBackend({"/": Repo(gitdir)})
    app = make_wsgi_chain(backend)
    server = make_server(
        options.listen_address,
        options.port,
        app,
        handler_class=WSGIRequestHandlerLogger,
        server_class=WSGIServerLogger,
    )
    logger.info(
-        "Listening for HTTP connections on %s:%d", options.listen_address, options.port
+        "Listening for HTTP connections on %s:%d",
+        options.listen_address,
+        options.port,
    )
    server.serve_forever()


if __name__ == "__main__":
    main()
diff --git a/pyproject.toml b/pyproject.toml
deleted file mode 100644
index a8f43fef..00000000
--- a/pyproject.toml
+++ /dev/null
@@ -1,2 +0,0 @@
-[tool.black]
-line-length = 79
diff --git a/setup.cfg b/setup.cfg
index 9e02035d..976ba029 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -1,5 +1,2 @@
-[flake8]
-exclude = build,.git,build-pypy,.tox
-
[mypy]
ignore_missing_imports = True