diff --git a/dulwich/hooks.py b/dulwich/hooks.py
index def9c600..32f98844 100644
--- a/dulwich/hooks.py
+++ b/dulwich/hooks.py
@@ -1,162 +1,202 @@
# hooks.py -- for dealing with git hooks
# Copyright (C) 2012-2013 Jelmer Vernooij and others.
#
# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
# General Public License as public by the Free Software Foundation; version 2.0
# or (at your option) any later version. You can redistribute it and/or
# modify it under the terms of either of these two licenses.
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# You should have received a copy of the licenses; if not, see
# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
# License, Version 2.0.
#
"""Access to hooks."""
import os
import subprocess
import sys
import tempfile
from dulwich.errors import (
HookError,
)
class Hook(object):
"""Generic hook object."""
def execute(self, *args):
"""Execute the hook with the given args
Args:
args: argument list to hook
Raises:
HookError: hook execution failure
Returns:
a hook may return a useful value
"""
raise NotImplementedError(self.execute)
class ShellHook(Hook):
"""Hook by executable file
Implements standard githooks(5) [0]:
[0] http://www.kernel.org/pub/software/scm/git/docs/githooks.html
"""
def __init__(self, name, path, numparam,
pre_exec_callback=None, post_exec_callback=None,
cwd=None):
"""Setup shell hook definition
Args:
name: name of hook for error messages
path: absolute path to executable file
numparam: number of required parameters
pre_exec_callback: closure for setup before execution
Defaults to None. Takes in the variable argument list from the
execute functions and returns a modified argument list for the
shell hook.
post_exec_callback: closure for cleanup after execution
Defaults to None. Takes in a boolean for hook success and the
modified argument list and returns the final hook return value
if applicable
cwd: working directory to switch to when executing the hook
"""
self.name = name
self.filepath = path
self.numparam = numparam
self.pre_exec_callback = pre_exec_callback
self.post_exec_callback = post_exec_callback
self.cwd = cwd
if sys.version_info[0] == 2 and sys.platform == 'win32':
# Python 2 on windows does not support unicode file paths
# http://bugs.python.org/issue1759845
self.filepath = self.filepath.encode(sys.getfilesystemencoding())
def execute(self, *args):
"""Execute the hook with given args"""
if len(args) != self.numparam:
raise HookError("Hook %s executed with wrong number of args. \
Expected %d. Saw %d. args: %s"
% (self.name, self.numparam, len(args), args))
if (self.pre_exec_callback is not None):
args = self.pre_exec_callback(*args)
try:
ret = subprocess.call([self.filepath] + list(args), cwd=self.cwd)
if ret != 0:
if (self.post_exec_callback is not None):
self.post_exec_callback(0, *args)
raise HookError("Hook %s exited with non-zero status"
% (self.name))
if (self.post_exec_callback is not None):
return self.post_exec_callback(1, *args)
except OSError: # no file. silent failure.
if (self.post_exec_callback is not None):
self.post_exec_callback(0, *args)
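# Illustration only (hypothetical hook name and script path, not part of this
# module): wiring the pre/post exec callbacks described in ShellHook above.
def _shell_hook_callback_example():
    def prepare(*args):
        # Turn the single Python-level argument into the argv the script sees.
        return (str(args[0]),)

    def cleanup(success, *args):
        # Map the shell hook's success flag onto a Python-level return value.
        return 'ok' if success else None

    hook = ShellHook('sample', '/tmp/hooks/sample', 1,
                     pre_exec_callback=prepare,
                     post_exec_callback=cleanup)
    # hook.execute(42) would run /tmp/hooks/sample with argv ['42'] and
    # return 'ok' on a zero exit status.
    return hook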
class PreCommitShellHook(ShellHook):
"""pre-commit shell hook"""
def __init__(self, controldir):
filepath = os.path.join(controldir, 'hooks', 'pre-commit')
ShellHook.__init__(self, 'pre-commit', filepath, 0, cwd=controldir)
class PostCommitShellHook(ShellHook):
"""post-commit shell hook"""
def __init__(self, controldir):
filepath = os.path.join(controldir, 'hooks', 'post-commit')
ShellHook.__init__(self, 'post-commit', filepath, 0, cwd=controldir)
class CommitMsgShellHook(ShellHook):
"""commit-msg shell hook
Args:
args[0]: commit message
Returns:
new commit message or None
"""
def __init__(self, controldir):
filepath = os.path.join(controldir, 'hooks', 'commit-msg')
def prepare_msg(*args):
(fd, path) = tempfile.mkstemp()
with os.fdopen(fd, 'wb') as f:
f.write(args[0])
return (path,)
def clean_msg(success, *args):
if success:
with open(args[0], 'rb') as f:
new_msg = f.read()
os.unlink(args[0])
return new_msg
os.unlink(args[0])
ShellHook.__init__(self, 'commit-msg', filepath, 1,
prepare_msg, clean_msg, controldir)
+
+
+class PostReceiveShellHook(ShellHook):
+ """post-receive shell hook"""
+
+ def __init__(self, controldir):
+ self.controldir = controldir
+ filepath = os.path.join(controldir, 'hooks', 'post-receive')
+ ShellHook.__init__(self, 'post-receive', filepath, 0)
+
+ def execute(self, client_refs):
+ # do nothing if the script doesn't exist
+ if not os.path.exists(self.filepath):
+ return None
+
+ try:
+ env = os.environ.copy()
+ env['GIT_DIR'] = self.controldir
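+            # GIT_DIR points git commands run from inside the hook at this
+            # repository; note that no cwd is passed to Popen below.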
+
+ p = subprocess.Popen(
+ self.filepath,
+ stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ env=env
+ )
+
+ # client_refs is a list of (oldsha, newsha, ref)
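+            # githooks(5): post-receive reads one "<old-sha> <new-sha> <refname>"
+            # line per updated ref on standard input.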
+            in_data = b'\n'.join([b' '.join(ref) for ref in client_refs])
+
+ out_data, err_data = p.communicate(in_data)
+
+ if (p.returncode != 0) or err_data:
+ err_fmt = "post-receive exit code: %d\n" \
+ + "stdout:\n%s\nstderr:\n%s"
+ err_msg = err_fmt % (p.returncode, out_data, err_data)
+ raise HookError(err_msg)
+ return out_data
+ except OSError as err:
+ raise HookError(repr(err))
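The new hook can be exercised without going through the smart server. A minimal
sketch follows; the bare-repository path and ref update are invented for
illustration, and the hooks/post-receive script is assumed to simply run `cat`:

from dulwich.hooks import PostReceiveShellHook

hook = PostReceiveShellHook('/tmp/test.git')        # hypothetical bare repo
out = hook.execute([
    (b'0' * 40, b'1' * 40, b'refs/heads/master'),   # (oldsha, newsha, ref)
])
# With a plain `cat` hook script this echoes back the single update line;
# if hooks/post-receive does not exist, execute() returns None instead.
print(out)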
diff --git a/dulwich/repo.py b/dulwich/repo.py
index 1e1a4c42..187cac45 100644
--- a/dulwich/repo.py
+++ b/dulwich/repo.py
@@ -1,1480 +1,1482 @@
# repo.py -- For dealing with git repositories.
# Copyright (C) 2007 James Westby
# Copyright (C) 2008-2013 Jelmer Vernooij
#
# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
# General Public License as public by the Free Software Foundation; version 2.0
# or (at your option) any later version. You can redistribute it and/or
# modify it under the terms of either of these two licenses.
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# You should have received a copy of the licenses; if not, see
# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
# License, Version 2.0.
#
"""Repository access.
This module contains the base class for git repositories
(BaseRepo) and an implementation which uses a repository on
local disk (Repo).
"""
from io import BytesIO
import errno
import os
import sys
import stat
import time
from dulwich.errors import (
NoIndexPresent,
NotBlobError,
NotCommitError,
NotGitRepository,
NotTreeError,
NotTagError,
CommitError,
RefFormatError,
HookError,
)
from dulwich.file import (
GitFile,
)
from dulwich.object_store import (
DiskObjectStore,
MemoryObjectStore,
ObjectStoreGraphWalker,
)
from dulwich.objects import (
check_hexsha,
Blob,
Commit,
ShaFile,
Tag,
Tree,
)
from dulwich.pack import (
pack_objects_to_data,
)
from dulwich.hooks import (
PreCommitShellHook,
PostCommitShellHook,
CommitMsgShellHook,
+ PostReceiveShellHook,
)
from dulwich.line_ending import BlobNormalizer
from dulwich.refs import ( # noqa: F401
ANNOTATED_TAG_SUFFIX,
check_ref_format,
RefsContainer,
DictRefsContainer,
InfoRefsContainer,
DiskRefsContainer,
read_packed_refs,
read_packed_refs_with_peeled,
write_packed_refs,
SYMREF,
)
import warnings
CONTROLDIR = '.git'
OBJECTDIR = 'objects'
REFSDIR = 'refs'
REFSDIR_TAGS = 'tags'
REFSDIR_HEADS = 'heads'
INDEX_FILENAME = "index"
COMMONDIR = 'commondir'
GITDIR = 'gitdir'
WORKTREES = 'worktrees'
BASE_DIRECTORIES = [
["branches"],
[REFSDIR],
[REFSDIR, REFSDIR_TAGS],
[REFSDIR, REFSDIR_HEADS],
["hooks"],
["info"]
]
DEFAULT_REF = b'refs/heads/master'
class InvalidUserIdentity(Exception):
"""User identity is not of the format 'user '"""
def __init__(self, identity):
self.identity = identity
def _get_default_identity():
import getpass
import socket
username = getpass.getuser()
try:
import pwd
except ImportError:
fullname = None
else:
try:
gecos = pwd.getpwnam(username).pw_gecos
except KeyError:
fullname = None
else:
fullname = gecos.split(',')[0]
if not fullname:
fullname = username
email = os.environ.get('EMAIL')
if email is None:
email = "{}@{}".format(username, socket.gethostname())
return (fullname, email)
def get_user_identity(config, kind=None):
"""Determine the identity to use for new commits.
"""
if kind:
user = os.environ.get("GIT_" + kind + "_NAME")
if user is not None:
user = user.encode('utf-8')
email = os.environ.get("GIT_" + kind + "_EMAIL")
if email is not None:
email = email.encode('utf-8')
else:
user = None
email = None
if user is None:
try:
user = config.get(("user", ), "name")
except KeyError:
user = None
if email is None:
try:
email = config.get(("user", ), "email")
except KeyError:
email = None
default_user, default_email = _get_default_identity()
if user is None:
user = default_user
if not isinstance(user, bytes):
user = user.encode('utf-8')
if email is None:
email = default_email
if not isinstance(email, bytes):
email = email.encode('utf-8')
if email.startswith(b'<') and email.endswith(b'>'):
email = email[1:-1]
return (user + b" <" + email + b">")
def check_user_identity(identity):
"""Verify that a user identity is formatted correctly.
Args:
identity: User identity bytestring
Raises:
InvalidUserIdentity: Raised when identity is invalid
"""
try:
fst, snd = identity.split(b' <', 1)
except ValueError:
raise InvalidUserIdentity(identity)
if b'>' not in snd:
raise InvalidUserIdentity(identity)
def parse_graftpoints(graftpoints):
"""Convert a list of graftpoints into a dict
Args:
graftpoints: Iterator of graftpoint lines
Each line is formatted as:
<commit sha1> <parent sha1> [<parent sha1>]*
Resulting dictionary is:
<commit sha1>: [<parent sha1>*]
https://git.wiki.kernel.org/index.php/GraftPoint
"""
grafts = {}
for l in graftpoints:
raw_graft = l.split(None, 1)
commit = raw_graft[0]
if len(raw_graft) == 2:
parents = raw_graft[1].split()
else:
parents = []
for sha in [commit] + parents:
check_hexsha(sha, 'Invalid graftpoint')
grafts[commit] = parents
return grafts
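# Illustration (not part of the original module): a graft line with one parent
# parses into a single-entry dict, and a bare commit sha maps to an empty list.
def _parse_graftpoints_example():
    grafts = parse_graftpoints([b'1' * 40 + b' ' + b'2' * 40, b'3' * 40])
    assert grafts == {b'1' * 40: [b'2' * 40], b'3' * 40: []}
    return grafts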
def serialize_graftpoints(graftpoints):
"""Convert a dictionary of grafts into string
The graft dictionary is:
<commit sha1>: [<parent sha1>*]
Each line is formatted as:
<commit sha1> <parent sha1> [<parent sha1>]*
https://git.wiki.kernel.org/index.php/GraftPoint
"""
graft_lines = []
for commit, parents in graftpoints.items():
if parents:
graft_lines.append(commit + b' ' + b' '.join(parents))
else:
graft_lines.append(commit)
return b'\n'.join(graft_lines)
def _set_filesystem_hidden(path):
"""Mark path as to be hidden if supported by platform and filesystem.
On win32 uses SetFileAttributesW api:
"""
if sys.platform == 'win32':
import ctypes
from ctypes.wintypes import BOOL, DWORD, LPCWSTR
FILE_ATTRIBUTE_HIDDEN = 2
SetFileAttributesW = ctypes.WINFUNCTYPE(BOOL, LPCWSTR, DWORD)(
("SetFileAttributesW", ctypes.windll.kernel32))
if isinstance(path, bytes):
path = path.decode(sys.getfilesystemencoding())
if not SetFileAttributesW(path, FILE_ATTRIBUTE_HIDDEN):
pass # Could raise or log `ctypes.WinError()` here
# Could implement other platform specific filesytem hiding here
class BaseRepo(object):
"""Base class for a git repository.
:ivar object_store: Dictionary-like object for accessing
the objects
:ivar refs: Dictionary-like object with the refs in this
repository
"""
def __init__(self, object_store, refs):
"""Open a repository.
This shouldn't be called directly, but rather through one of the
subclasses, such as MemoryRepo or Repo.
Args:
object_store: Object store to use
refs: Refs container to use
"""
self.object_store = object_store
self.refs = refs
self._graftpoints = {}
self.hooks = {}
def _determine_file_mode(self):
"""Probe the file-system to determine whether permissions can be trusted.
Returns: True if permissions can be trusted, False otherwise.
"""
raise NotImplementedError(self._determine_file_mode)
def _init_files(self, bare):
"""Initialize a default set of named files."""
from dulwich.config import ConfigFile
self._put_named_file('description', b"Unnamed repository")
f = BytesIO()
cf = ConfigFile()
cf.set("core", "repositoryformatversion", "0")
if self._determine_file_mode():
cf.set("core", "filemode", True)
else:
cf.set("core", "filemode", False)
cf.set("core", "bare", bare)
cf.set("core", "logallrefupdates", True)
cf.write_to_file(f)
self._put_named_file('config', f.getvalue())
self._put_named_file(os.path.join('info', 'exclude'), b'')
def get_named_file(self, path):
"""Get a file from the control dir with a specific name.
Although the filename should be interpreted as a filename relative to
the control dir in a disk-based Repo, the object returned need not be
pointing to a file in that location.
Args:
path: The path to the file, relative to the control dir.
Returns: An open file object, or None if the file does not exist.
"""
raise NotImplementedError(self.get_named_file)
def _put_named_file(self, path, contents):
"""Write a file to the control dir with the given name and contents.
Args:
path: The path to the file, relative to the control dir.
contents: A string to write to the file.
"""
raise NotImplementedError(self._put_named_file)
def _del_named_file(self, path):
"""Delete a file in the contrl directory with the given name."""
raise NotImplementedError(self._del_named_file)
def open_index(self):
"""Open the index for this repository.
Raises:
NoIndexPresent: If no index is present
Returns: The matching `Index`
"""
raise NotImplementedError(self.open_index)
def fetch(self, target, determine_wants=None, progress=None, depth=None):
"""Fetch objects into another repository.
Args:
target: The target repository
determine_wants: Optional function to determine what refs to
fetch.
progress: Optional progress function
depth: Optional shallow fetch depth
Returns: The local refs
"""
if determine_wants is None:
determine_wants = target.object_store.determine_wants_all
count, pack_data = self.fetch_pack_data(
determine_wants, target.get_graph_walker(), progress=progress,
depth=depth)
target.object_store.add_pack_data(count, pack_data, progress)
return self.get_refs()
def fetch_pack_data(self, determine_wants, graph_walker, progress,
get_tagged=None, depth=None):
"""Fetch the pack data required for a set of revisions.
Args:
determine_wants: Function that takes a dictionary with heads
and returns the list of heads to fetch.
graph_walker: Object that can iterate over the list of revisions
to fetch and has an "ack" method that will be called to acknowledge
that a revision is present.
progress: Simple progress function that will be called with
updated progress strings.
get_tagged: Function that returns a dict of pointed-to sha ->
tag sha for including tags.
depth: Shallow fetch depth
Returns: count and iterator over pack data
"""
# TODO(jelmer): Fetch pack data directly, don't create objects first.
objects = self.fetch_objects(determine_wants, graph_walker, progress,
get_tagged, depth=depth)
return pack_objects_to_data(objects)
def fetch_objects(self, determine_wants, graph_walker, progress,
get_tagged=None, depth=None):
"""Fetch the missing objects required for a set of revisions.
Args:
determine_wants: Function that takes a dictionary with heads
and returns the list of heads to fetch.
graph_walker: Object that can iterate over the list of revisions
to fetch and has an "ack" method that will be called to acknowledge
that a revision is present.
progress: Simple progress function that will be called with
updated progress strings.
get_tagged: Function that returns a dict of pointed-to sha ->
tag sha for including tags.
depth: Shallow fetch depth
Returns: iterator over objects, with __len__ implemented
"""
if depth not in (None, 0):
raise NotImplementedError("depth not supported yet")
refs = {}
for ref, sha in self.get_refs().items():
try:
obj = self.object_store[sha]
except KeyError:
warnings.warn(
'ref %s points at non-present sha %s' % (
ref.decode('utf-8', 'replace'), sha.decode('ascii')),
UserWarning)
continue
else:
if isinstance(obj, Tag):
refs[ref + ANNOTATED_TAG_SUFFIX] = obj.object[1]
refs[ref] = sha
wants = determine_wants(refs)
if not isinstance(wants, list):
raise TypeError("determine_wants() did not return a list")
shallows = getattr(graph_walker, 'shallow', frozenset())
unshallows = getattr(graph_walker, 'unshallow', frozenset())
if wants == []:
# TODO(dborowitz): find a way to short-circuit that doesn't change
# this interface.
if shallows or unshallows:
# Do not send a pack in shallow short-circuit path
return None
return []
# If the graph walker is set up with an implementation that can
# ACK/NAK to the wire, it will write data to the client through
# this call as a side-effect.
haves = self.object_store.find_common_revisions(graph_walker)
# Deal with shallow requests separately because the haves do
# not reflect what objects are missing
if shallows or unshallows:
# TODO: filter the haves commits from iter_shas. the specific
# commits aren't missing.
haves = []
def get_parents(commit):
if commit.id in shallows:
return []
return self.get_parents(commit.id, commit)
return self.object_store.iter_shas(
self.object_store.find_missing_objects(
haves, wants, progress,
get_tagged,
get_parents=get_parents))
def get_graph_walker(self, heads=None):
"""Retrieve a graph walker.
A graph walker is used by a remote repository (or proxy)
to find out which objects are present in this repository.
Args:
heads: Repository heads to use (optional)
Returns: A graph walker object
"""
if heads is None:
heads = [
sha for sha in self.refs.as_dict(b'refs/heads').values()
if sha in self.object_store]
return ObjectStoreGraphWalker(
heads, self.get_parents, shallow=self.get_shallow())
def get_refs(self):
"""Get dictionary with all refs.
Returns: A ``dict`` mapping ref names to SHA1s
"""
return self.refs.as_dict()
def head(self):
"""Return the SHA1 pointed at by HEAD."""
return self.refs[b'HEAD']
def _get_object(self, sha, cls):
assert len(sha) in (20, 40)
ret = self.get_object(sha)
if not isinstance(ret, cls):
if cls is Commit:
raise NotCommitError(ret)
elif cls is Blob:
raise NotBlobError(ret)
elif cls is Tree:
raise NotTreeError(ret)
elif cls is Tag:
raise NotTagError(ret)
else:
raise Exception("Type invalid: %r != %r" % (
ret.type_name, cls.type_name))
return ret
def get_object(self, sha):
"""Retrieve the object with the specified SHA.
Args:
sha: SHA to retrieve
Returns: A ShaFile object
Raises:
KeyError: when the object can not be found
"""
return self.object_store[sha]
def get_parents(self, sha, commit=None):
"""Retrieve the parents of a specific commit.
If the specific commit is a graftpoint, the graft parents
will be returned instead.
Args:
sha: SHA of the commit for which to retrieve the parents
commit: Optional commit matching the sha
Returns: List of parents
"""
try:
return self._graftpoints[sha]
except KeyError:
if commit is None:
commit = self[sha]
return commit.parents
def get_config(self):
"""Retrieve the config object.
Returns: `ConfigFile` object for the ``.git/config`` file.
"""
raise NotImplementedError(self.get_config)
def get_description(self):
"""Retrieve the description for this repository.
Returns: String with the description of the repository
as set by the user.
"""
raise NotImplementedError(self.get_description)
def set_description(self, description):
"""Set the description for this repository.
Args:
description: Text to set as description for this repository.
"""
raise NotImplementedError(self.set_description)
def get_config_stack(self):
"""Return a config stack for this repository.
This stack accesses the configuration for both this repository
itself (.git/config) and the global configuration, which usually
lives in ~/.gitconfig.
Returns: `Config` instance for this repository
"""
from dulwich.config import StackedConfig
backends = [self.get_config()] + StackedConfig.default_backends()
return StackedConfig(backends, writable=backends[0])
def get_shallow(self):
"""Get the set of shallow commits.
Returns: Set of shallow commits.
"""
f = self.get_named_file('shallow')
if f is None:
return set()
with f:
return set(l.strip() for l in f)
def update_shallow(self, new_shallow, new_unshallow):
"""Update the list of shallow objects.
Args:
new_shallow: Newly shallow objects
new_unshallow: Newly no longer shallow objects
"""
shallow = self.get_shallow()
if new_shallow:
shallow.update(new_shallow)
if new_unshallow:
shallow.difference_update(new_unshallow)
self._put_named_file(
'shallow',
b''.join([sha + b'\n' for sha in shallow]))
def get_peeled(self, ref):
"""Get the peeled value of a ref.
Args:
ref: The refname to peel.
Returns: The fully-peeled SHA1 of a tag object, after peeling all
intermediate tags; if the original ref does not point to a tag,
this will equal the original SHA1.
"""
cached = self.refs.get_peeled(ref)
if cached is not None:
return cached
return self.object_store.peel_sha(self.refs[ref]).id
def get_walker(self, include=None, *args, **kwargs):
"""Obtain a walker for this repository.
Args:
include: Iterable of SHAs of commits to include along with their
ancestors. Defaults to [HEAD]
exclude: Iterable of SHAs of commits to exclude along with their
ancestors, overriding includes.
order: ORDER_* constant specifying the order of results.
Anything other than ORDER_DATE may result in O(n) memory usage.
reverse: If True, reverse the order of output, requiring O(n)
memory.
max_entries: The maximum number of entries to yield, or None for
no limit.
paths: Iterable of file or subtree paths to show entries for.
rename_detector: diff.RenameDetector object for detecting
renames.
follow: If True, follow path across renames/copies. Forces a
default rename_detector.
since: Timestamp to list commits after.
until: Timestamp to list commits before.
queue_cls: A class to use for a queue of commits, supporting the
iterator protocol. The constructor takes a single argument, the
Walker.
Returns: A `Walker` object
"""
from dulwich.walk import Walker
if include is None:
include = [self.head()]
if isinstance(include, str):
include = [include]
kwargs['get_parents'] = lambda commit: self.get_parents(
commit.id, commit)
return Walker(self.object_store, include, *args, **kwargs)
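# Illustration (hypothetical path, assumes the repository has a HEAD commit):
# walking recent history for a single file via the arguments documented above.
def _walker_example(repo):
    walker = repo.get_walker(paths=[b'README.md'], max_entries=10)
    return [entry.commit.id for entry in walker]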
def __getitem__(self, name):
"""Retrieve a Git object by SHA1 or ref.
Args:
name: A Git object SHA1 or a ref name
Returns: A `ShaFile` object, such as a Commit or Blob
Raises:
KeyError: when the specified ref or object does not exist
"""
if not isinstance(name, bytes):
raise TypeError("'name' must be bytestring, not %.80s" %
type(name).__name__)
if len(name) in (20, 40):
try:
return self.object_store[name]
except (KeyError, ValueError):
pass
try:
return self.object_store[self.refs[name]]
except RefFormatError:
raise KeyError(name)
def __contains__(self, name):
"""Check if a specific Git object or ref is present.
Args:
name: Git object SHA1 or ref name
"""
if len(name) in (20, 40):
return name in self.object_store or name in self.refs
else:
return name in self.refs
def __setitem__(self, name, value):
"""Set a ref.
Args:
name: ref name
value: Ref value - either a ShaFile object, or a hex sha
"""
if name.startswith(b"refs/") or name == b'HEAD':
if isinstance(value, ShaFile):
self.refs[name] = value.id
elif isinstance(value, bytes):
self.refs[name] = value
else:
raise TypeError(value)
else:
raise ValueError(name)
def __delitem__(self, name):
"""Remove a ref.
Args:
name: Name of the ref to remove
"""
if name.startswith(b"refs/") or name == b"HEAD":
del self.refs[name]
else:
raise ValueError(name)
def _get_user_identity(self, config, kind=None):
"""Determine the identity to use for new commits.
"""
# TODO(jelmer): Deprecate this function in favor of get_user_identity
return get_user_identity(config)
def _add_graftpoints(self, updated_graftpoints):
"""Add or modify graftpoints
Args:
updated_graftpoints: Dict of commit shas to list of parent shas
"""
# Simple validation
for commit, parents in updated_graftpoints.items():
for sha in [commit] + parents:
check_hexsha(sha, 'Invalid graftpoint')
self._graftpoints.update(updated_graftpoints)
def _remove_graftpoints(self, to_remove=[]):
"""Remove graftpoints
Args:
to_remove: List of commit shas
"""
for sha in to_remove:
del self._graftpoints[sha]
def _read_heads(self, name):
f = self.get_named_file(name)
if f is None:
return []
with f:
return [l.strip() for l in f.readlines() if l.strip()]
def do_commit(self, message=None, committer=None,
author=None, commit_timestamp=None,
commit_timezone=None, author_timestamp=None,
author_timezone=None, tree=None, encoding=None,
ref=b'HEAD', merge_heads=None):
"""Create a new commit.
Args:
message: Commit message
committer: Committer fullname
author: Author fullname (defaults to committer)
commit_timestamp: Commit timestamp (defaults to now)
commit_timezone: Commit timestamp timezone (defaults to GMT)
author_timestamp: Author timestamp (defaults to commit
timestamp)
author_timezone: Author timestamp timezone
(defaults to commit timestamp timezone)
tree: SHA1 of the tree root to use (if not specified the
current index will be committed).
encoding: Encoding
ref: Optional ref to commit to (defaults to current branch)
merge_heads: Merge heads (defaults to .git/MERGE_HEADS)
Returns: New commit SHA1
"""
import time
c = Commit()
if tree is None:
index = self.open_index()
c.tree = index.commit(self.object_store)
else:
if len(tree) != 40:
raise ValueError("tree must be a 40-byte hex sha string")
c.tree = tree
try:
self.hooks['pre-commit'].execute()
except HookError as e:
raise CommitError(e)
except KeyError: # no hook defined, silent fallthrough
pass
config = self.get_config_stack()
if merge_heads is None:
merge_heads = self._read_heads('MERGE_HEADS')
if committer is None:
committer = get_user_identity(config, kind='COMMITTER')
check_user_identity(committer)
c.committer = committer
if commit_timestamp is None:
# FIXME: Support GIT_COMMITTER_DATE environment variable
commit_timestamp = time.time()
c.commit_time = int(commit_timestamp)
if commit_timezone is None:
# FIXME: Use current user timezone rather than UTC
commit_timezone = 0
c.commit_timezone = commit_timezone
if author is None:
author = get_user_identity(config, kind='AUTHOR')
c.author = author
check_user_identity(author)
if author_timestamp is None:
# FIXME: Support GIT_AUTHOR_DATE environment variable
author_timestamp = commit_timestamp
c.author_time = int(author_timestamp)
if author_timezone is None:
author_timezone = commit_timezone
c.author_timezone = author_timezone
if encoding is None:
try:
encoding = config.get(('i18n', ), 'commitEncoding')
except KeyError:
pass # No dice
if encoding is not None:
c.encoding = encoding
if message is None:
# FIXME: Try to read commit message from .git/MERGE_MSG
raise ValueError("No commit message specified")
try:
c.message = self.hooks['commit-msg'].execute(message)
if c.message is None:
c.message = message
except HookError as e:
raise CommitError(e)
except KeyError: # no hook defined, message not modified
c.message = message
if ref is None:
# Create a dangling commit
c.parents = merge_heads
self.object_store.add_object(c)
else:
try:
old_head = self.refs[ref]
c.parents = [old_head] + merge_heads
self.object_store.add_object(c)
ok = self.refs.set_if_equals(
ref, old_head, c.id, message=b"commit: " + message,
committer=committer, timestamp=commit_timestamp,
timezone=commit_timezone)
except KeyError:
c.parents = merge_heads
self.object_store.add_object(c)
ok = self.refs.add_if_new(
ref, c.id, message=b"commit: " + message,
committer=committer, timestamp=commit_timestamp,
timezone=commit_timezone)
if not ok:
# Fail if the atomic compare-and-swap failed, leaving the
# commit and all its objects as garbage.
raise CommitError("%s changed during commit" % (ref,))
self._del_named_file('MERGE_HEADS')
try:
self.hooks['post-commit'].execute()
except HookError as e: # silent failure
warnings.warn("post-commit hook failed: %s" % e, UserWarning)
except KeyError: # no hook defined, silent fallthrough
pass
return c.id
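# Sketch of the commit flow above from the calling side; the repository path
# and identity are illustrative and assume an existing non-bare checkout.
def _do_commit_example():
    from dulwich.repo import Repo
    r = Repo('/tmp/example-repo')  # hypothetical working copy
    # The pre-commit and commit-msg hooks registered in r.hooks run inside
    # this call; the return value is the new commit's hex SHA.
    return r.do_commit(b'fix typo',
                       committer=b'Jane Doe <jane@example.com>')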
def read_gitfile(f):
"""Read a ``.git`` file.
The first line of the file should start with "gitdir: "
Args:
f: File-like object to read from
Returns: A path
"""
cs = f.read()
if not cs.startswith("gitdir: "):
raise ValueError("Expected file to start with 'gitdir: '")
return cs[len("gitdir: "):].rstrip("\n")
class Repo(BaseRepo):
"""A git repository backed by local disk.
To open an existing repository, call the constructor with
the path of the repository.
To create a new repository, use the Repo.init class method.
"""
def __init__(self, root):
hidden_path = os.path.join(root, CONTROLDIR)
if os.path.isdir(os.path.join(hidden_path, OBJECTDIR)):
self.bare = False
self._controldir = hidden_path
elif (os.path.isdir(os.path.join(root, OBJECTDIR)) and
os.path.isdir(os.path.join(root, REFSDIR))):
self.bare = True
self._controldir = root
elif os.path.isfile(hidden_path):
self.bare = False
with open(hidden_path, 'r') as f:
path = read_gitfile(f)
self.bare = False
self._controldir = os.path.join(root, path)
else:
raise NotGitRepository(
"No git repository was found at %(path)s" % dict(path=root)
)
commondir = self.get_named_file(COMMONDIR)
if commondir is not None:
with commondir:
self._commondir = os.path.join(
self.controldir(),
commondir.read().rstrip(b"\r\n").decode(
sys.getfilesystemencoding()))
else:
self._commondir = self._controldir
self.path = root
object_store = DiskObjectStore(
os.path.join(self.commondir(), OBJECTDIR))
refs = DiskRefsContainer(self.commondir(), self._controldir,
logger=self._write_reflog)
BaseRepo.__init__(self, object_store, refs)
self._graftpoints = {}
graft_file = self.get_named_file(os.path.join("info", "grafts"),
basedir=self.commondir())
if graft_file:
with graft_file:
self._graftpoints.update(parse_graftpoints(graft_file))
graft_file = self.get_named_file("shallow",
basedir=self.commondir())
if graft_file:
with graft_file:
self._graftpoints.update(parse_graftpoints(graft_file))
self.hooks['pre-commit'] = PreCommitShellHook(self.controldir())
self.hooks['commit-msg'] = CommitMsgShellHook(self.controldir())
self.hooks['post-commit'] = PostCommitShellHook(self.controldir())
+ self.hooks['post-receive'] = PostReceiveShellHook(self.controldir())
def _write_reflog(self, ref, old_sha, new_sha, committer, timestamp,
timezone, message):
from .reflog import format_reflog_line
path = os.path.join(
self.controldir(), 'logs',
ref.decode(sys.getfilesystemencoding()))
try:
os.makedirs(os.path.dirname(path))
except OSError as e:
if e.errno != errno.EEXIST:
raise
if committer is None:
config = self.get_config_stack()
committer = self._get_user_identity(config)
check_user_identity(committer)
if timestamp is None:
timestamp = int(time.time())
if timezone is None:
timezone = 0 # FIXME
with open(path, 'ab') as f:
f.write(format_reflog_line(old_sha, new_sha, committer,
timestamp, timezone, message) + b'\n')
@classmethod
def discover(cls, start='.'):
"""Iterate parent directories to discover a repository
Return a Repo object for the first parent directory that looks like a
Git repository.
Args:
start: The directory to start discovery from (defaults to '.')
"""
remaining = True
path = os.path.abspath(start)
while remaining:
try:
return cls(path)
except NotGitRepository:
path, remaining = os.path.split(path)
raise NotGitRepository(
"No git repository was found at %(path)s" % dict(path=start)
)
def controldir(self):
"""Return the path of the control directory."""
return self._controldir
def commondir(self):
"""Return the path of the common directory.
For a main working tree, it is identical to controldir().
For a linked working tree, it is the control directory of the
main working tree."""
return self._commondir
def _determine_file_mode(self):
"""Probe the file-system to determine whether permissions can be trusted.
Returns: True if permissions can be trusted, False otherwise.
"""
fname = os.path.join(self.path, '.probe-permissions')
with open(fname, 'w') as f:
f.write('')
st1 = os.lstat(fname)
try:
os.chmod(fname, st1.st_mode ^ stat.S_IXUSR)
except EnvironmentError as e:
if e.errno == errno.EPERM:
return False
raise
st2 = os.lstat(fname)
os.unlink(fname)
mode_differs = st1.st_mode != st2.st_mode
st2_has_exec = (st2.st_mode & stat.S_IXUSR) != 0
return mode_differs and st2_has_exec
def _put_named_file(self, path, contents):
"""Write a file to the control dir with the given name and contents.
Args:
path: The path to the file, relative to the control dir.
contents: A string to write to the file.
"""
path = path.lstrip(os.path.sep)
with GitFile(os.path.join(self.controldir(), path), 'wb') as f:
f.write(contents)
def _del_named_file(self, path):
try:
os.unlink(os.path.join(self.controldir(), path))
except (IOError, OSError) as e:
if e.errno == errno.ENOENT:
return
raise
def get_named_file(self, path, basedir=None):
"""Get a file from the control dir with a specific name.
Although the filename should be interpreted as a filename relative to
the control dir in a disk-based Repo, the object returned need not be
pointing to a file in that location.
Args:
path: The path to the file, relative to the control dir.
basedir: Optional argument that specifies an alternative to the
control dir.
Returns: An open file object, or None if the file does not exist.
"""
# TODO(dborowitz): sanitize filenames, since this is used directly by
# the dumb web serving code.
if basedir is None:
basedir = self.controldir()
path = path.lstrip(os.path.sep)
try:
return open(os.path.join(basedir, path), 'rb')
except (IOError, OSError) as e:
if e.errno == errno.ENOENT:
return None
raise
def index_path(self):
"""Return path to the index file."""
return os.path.join(self.controldir(), INDEX_FILENAME)
def open_index(self):
"""Open the index for this repository.
Raises:
NoIndexPresent: If no index is present
Returns: The matching `Index`
"""
from dulwich.index import Index
if not self.has_index():
raise NoIndexPresent()
return Index(self.index_path())
def has_index(self):
"""Check if an index is present."""
# Bare repos must never have index files; non-bare repos may have a
# missing index file, which is treated as empty.
return not self.bare
def stage(self, fs_paths):
"""Stage a set of paths.
Args:
fs_paths: List of paths, relative to the repository path
"""
root_path_bytes = self.path.encode(sys.getfilesystemencoding())
if not isinstance(fs_paths, list):
fs_paths = [fs_paths]
from dulwich.index import (
blob_from_path_and_stat,
index_entry_from_stat,
_fs_to_tree_path,
)
index = self.open_index()
blob_normalizer = self.get_blob_normalizer()
for fs_path in fs_paths:
if not isinstance(fs_path, bytes):
fs_path = fs_path.encode(sys.getfilesystemencoding())
if os.path.isabs(fs_path):
raise ValueError(
"path %r should be relative to "
"repository root, not absolute" % fs_path)
tree_path = _fs_to_tree_path(fs_path)
full_path = os.path.join(root_path_bytes, fs_path)
try:
st = os.lstat(full_path)
except OSError:
# File no longer exists
try:
del index[tree_path]
except KeyError:
pass # already removed
else:
if not stat.S_ISDIR(st.st_mode):
blob = blob_from_path_and_stat(full_path, st)
blob = blob_normalizer.checkin_normalize(blob, fs_path)
self.object_store.add_object(blob)
index[tree_path] = index_entry_from_stat(st, blob.id, 0)
else:
try:
del index[tree_path]
except KeyError:
pass
index.write()
def clone(self, target_path, mkdir=True, bare=False,
origin=b"origin", checkout=None):
"""Clone this repository.
Args:
target_path: Target path
mkdir: Create the target directory
bare: Whether to create a bare repository
origin: Base name for refs in target repository
cloned from this repository
Returns: Created repository as `Repo`
"""
if not bare:
target = self.init(target_path, mkdir=mkdir)
else:
if checkout:
raise ValueError("checkout and bare are incompatible")
target = self.init_bare(target_path, mkdir=mkdir)
self.fetch(target)
encoded_path = self.path
if not isinstance(encoded_path, bytes):
encoded_path = encoded_path.encode(sys.getfilesystemencoding())
ref_message = b"clone: from " + encoded_path
target.refs.import_refs(
b'refs/remotes/' + origin, self.refs.as_dict(b'refs/heads'),
message=ref_message)
target.refs.import_refs(
b'refs/tags', self.refs.as_dict(b'refs/tags'),
message=ref_message)
try:
target.refs.add_if_new(
DEFAULT_REF, self.refs[DEFAULT_REF],
message=ref_message)
except KeyError:
pass
target_config = target.get_config()
target_config.set(('remote', 'origin'), 'url', encoded_path)
target_config.set(('remote', 'origin'), 'fetch',
'+refs/heads/*:refs/remotes/origin/*')
target_config.write_to_path()
# Update target head
head_chain, head_sha = self.refs.follow(b'HEAD')
if head_chain and head_sha is not None:
target.refs.set_symbolic_ref(b'HEAD', head_chain[-1],
message=ref_message)
target[b'HEAD'] = head_sha
if checkout is None:
checkout = (not bare)
if checkout:
# Checkout HEAD to target dir
target.reset_index()
return target
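# Illustration (hypothetical paths): cloning an existing checkout into a new
# directory, relying on the defaults documented above.
def _clone_example():
    from dulwich.repo import Repo
    src = Repo('/tmp/src-repo')            # assumed existing repository
    return src.clone('/tmp/dst-repo', mkdir=True, bare=False)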
def reset_index(self, tree=None):
"""Reset the index back to a specific tree.
Args:
tree: Tree SHA to reset to, None for current HEAD tree.
"""
from dulwich.index import (
build_index_from_tree,
validate_path_element_default,
validate_path_element_ntfs,
)
if tree is None:
tree = self[b'HEAD'].tree
config = self.get_config()
honor_filemode = config.get_boolean(
b'core', b'filemode', os.name != "nt")
if config.get_boolean(b'core', b'core.protectNTFS', os.name == "nt"):
validate_path_element = validate_path_element_ntfs
else:
validate_path_element = validate_path_element_default
return build_index_from_tree(
self.path, self.index_path(), self.object_store, tree,
honor_filemode=honor_filemode,
validate_path_element=validate_path_element)
def get_config(self):
"""Retrieve the config object.
Returns: `ConfigFile` object for the ``.git/config`` file.
"""
from dulwich.config import ConfigFile
path = os.path.join(self._controldir, 'config')
try:
return ConfigFile.from_path(path)
except (IOError, OSError) as e:
if e.errno != errno.ENOENT:
raise
ret = ConfigFile()
ret.path = path
return ret
def get_description(self):
"""Retrieve the description of this repository.
Returns: A string describing the repository or None.
"""
path = os.path.join(self._controldir, 'description')
try:
with GitFile(path, 'rb') as f:
return f.read()
except (IOError, OSError) as e:
if e.errno != errno.ENOENT:
raise
return None
def __repr__(self):
return "" % self.path
def set_description(self, description):
"""Set the description for this repository.
Args:
description: Text to set as description for this repository.
"""
self._put_named_file('description', description)
@classmethod
def _init_maybe_bare(cls, path, bare):
for d in BASE_DIRECTORIES:
os.mkdir(os.path.join(path, *d))
DiskObjectStore.init(os.path.join(path, OBJECTDIR))
ret = cls(path)
ret.refs.set_symbolic_ref(b'HEAD', DEFAULT_REF)
ret._init_files(bare)
return ret
@classmethod
def init(cls, path, mkdir=False):
"""Create a new repository.
Args:
path: Path in which to create the repository
mkdir: Whether to create the directory
Returns: `Repo` instance
"""
if mkdir:
os.mkdir(path)
controldir = os.path.join(path, CONTROLDIR)
os.mkdir(controldir)
_set_filesystem_hidden(controldir)
cls._init_maybe_bare(controldir, False)
return cls(path)
@classmethod
def _init_new_working_directory(cls, path, main_repo, identifier=None,
mkdir=False):
"""Create a new working directory linked to a repository.
Args:
path: Path in which to create the working tree.
main_repo: Main repository to reference
identifier: Worktree identifier
mkdir: Whether to create the directory
Returns: `Repo` instance
"""
if mkdir:
os.mkdir(path)
if identifier is None:
identifier = os.path.basename(path)
main_worktreesdir = os.path.join(main_repo.controldir(), WORKTREES)
worktree_controldir = os.path.join(main_worktreesdir, identifier)
gitdirfile = os.path.join(path, CONTROLDIR)
with open(gitdirfile, 'wb') as f:
f.write(b'gitdir: ' +
worktree_controldir.encode(sys.getfilesystemencoding()) +
b'\n')
try:
os.mkdir(main_worktreesdir)
except OSError as e:
if e.errno != errno.EEXIST:
raise
try:
os.mkdir(worktree_controldir)
except OSError as e:
if e.errno != errno.EEXIST:
raise
with open(os.path.join(worktree_controldir, GITDIR), 'wb') as f:
f.write(gitdirfile.encode(sys.getfilesystemencoding()) + b'\n')
with open(os.path.join(worktree_controldir, COMMONDIR), 'wb') as f:
f.write(b'../..\n')
with open(os.path.join(worktree_controldir, 'HEAD'), 'wb') as f:
f.write(main_repo.head() + b'\n')
r = cls(path)
r.reset_index()
return r
@classmethod
def init_bare(cls, path, mkdir=False):
"""Create a new bare repository.
``path`` should already exist and be an empty directory.
Args:
path: Path to create bare repository in
Returns: a `Repo` instance
"""
if mkdir:
os.mkdir(path)
return cls._init_maybe_bare(path, True)
create = init_bare
def close(self):
"""Close any files opened by this repository."""
self.object_store.close()
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.close()
def get_blob_normalizer(self):
""" Return a BlobNormalizer object
"""
# TODO Parse the git attributes files
git_attributes = {}
return BlobNormalizer(
self.get_config_stack(), git_attributes
)
class MemoryRepo(BaseRepo):
"""Repo that stores refs, objects, and named files in memory.
MemoryRepos are always bare: they have no working tree and no index, since
those have a stronger dependency on the filesystem.
"""
def __init__(self):
from dulwich.config import ConfigFile
self._reflog = []
refs_container = DictRefsContainer({}, logger=self._append_reflog)
BaseRepo.__init__(self, MemoryObjectStore(), refs_container)
self._named_files = {}
self.bare = True
self._config = ConfigFile()
self._description = None
def _append_reflog(self, *args):
self._reflog.append(args)
def set_description(self, description):
self._description = description
def get_description(self):
return self._description
def _determine_file_mode(self):
"""Probe the file-system to determine whether permissions can be trusted.
Returns: True if permissions can be trusted, False otherwise.
"""
return sys.platform != 'win32'
def _put_named_file(self, path, contents):
"""Write a file to the control dir with the given name and contents.
Args:
path: The path to the file, relative to the control dir.
contents: A string to write to the file.
"""
self._named_files[path] = contents
def _del_named_file(self, path):
try:
del self._named_files[path]
except KeyError:
pass
def get_named_file(self, path, basedir=None):
"""Get a file from the control dir with a specific name.
Although the filename should be interpreted as a filename relative to
the control dir in a disk-based Repo, the object returned need not be
pointing to a file in that location.
Args:
path: The path to the file, relative to the control dir.
Returns: An open file object, or None if the file does not exist.
"""
contents = self._named_files.get(path, None)
if contents is None:
return None
return BytesIO(contents)
def open_index(self):
"""Fail to open index for this repo, since it is bare.
Raises:
NoIndexPresent: Raised when no index is present
"""
raise NoIndexPresent()
def get_config(self):
"""Retrieve the config object.
Returns: `ConfigFile` object.
"""
return self._config
@classmethod
def init_bare(cls, objects, refs):
"""Create a new bare repository in memory.
Args:
objects: Objects for the new repository,
as iterable
refs: Refs as dictionary, mapping names
to object SHA1s
"""
ret = cls()
for obj in objects:
ret.object_store.add_object(obj)
for refname, sha in refs.items():
ret.refs.add_if_new(refname, sha)
ret._init_files(bare=True)
return ret
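Since Repo.__init__ above now registers the hook under
self.hooks['post-receive'], any code that updates refs on a Repo can run it
afterwards. A minimal, hypothetical driver, independent of the server.py
changes below (path and ref data are invented for illustration):

from dulwich.repo import Repo

def run_post_receive(path, updates):
    """updates: list of (oldsha, newsha, refname) byte tuples."""
    repo = Repo(path)
    hook = repo.hooks.get('post-receive')
    if hook is None:
        return None
    # Raises HookError when the script exits non-zero or writes to stderr.
    return hook.execute(updates)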
diff --git a/dulwich/server.py b/dulwich/server.py
index f5af537e..2aae5b6d 100644
--- a/dulwich/server.py
+++ b/dulwich/server.py
@@ -1,1200 +1,1214 @@
# server.py -- Implementation of the server side git protocols
# Copyright (C) 2008 John Carr
# Copyright (C) 2011-2012 Jelmer Vernooij
#
# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
# General Public License as public by the Free Software Foundation; version 2.0
# or (at your option) any later version. You can redistribute it and/or
# modify it under the terms of either of these two licenses.
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# You should have received a copy of the licenses; if not, see
# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
# License, Version 2.0.
#
"""Git smart network protocol server implementation.
For more detailed implementation on the network protocol, see the
Documentation/technical directory in the cgit distribution, and in particular:
* Documentation/technical/protocol-capabilities.txt
* Documentation/technical/pack-protocol.txt
Currently supported capabilities:
* include-tag
* thin-pack
* multi_ack_detailed
* multi_ack
* side-band-64k
* ofs-delta
* no-progress
* report-status
* delete-refs
* shallow
* symref
"""
import collections
import os
import socket
import sys
import time
import zlib
try:
import SocketServer
except ImportError:
import socketserver as SocketServer
from dulwich.archive import tar_stream
from dulwich.errors import (
ApplyDeltaError,
ChecksumMismatch,
GitProtocolError,
+ HookError,
NotGitRepository,
UnexpectedCommandError,
ObjectFormatException,
)
from dulwich import log_utils
from dulwich.objects import (
Commit,
valid_hexsha,
)
from dulwich.pack import (
write_pack_objects,
)
from dulwich.protocol import ( # noqa: F401
BufferedPktLineWriter,
capability_agent,
CAPABILITIES_REF,
CAPABILITY_DELETE_REFS,
CAPABILITY_INCLUDE_TAG,
CAPABILITY_MULTI_ACK_DETAILED,
CAPABILITY_MULTI_ACK,
CAPABILITY_NO_DONE,
CAPABILITY_NO_PROGRESS,
CAPABILITY_OFS_DELTA,
CAPABILITY_QUIET,
CAPABILITY_REPORT_STATUS,
CAPABILITY_SHALLOW,
CAPABILITY_SIDE_BAND_64K,
CAPABILITY_THIN_PACK,
COMMAND_DEEPEN,
COMMAND_DONE,
COMMAND_HAVE,
COMMAND_SHALLOW,
COMMAND_UNSHALLOW,
COMMAND_WANT,
MULTI_ACK,
MULTI_ACK_DETAILED,
Protocol,
ProtocolFile,
ReceivableProtocol,
SIDE_BAND_CHANNEL_DATA,
SIDE_BAND_CHANNEL_PROGRESS,
SIDE_BAND_CHANNEL_FATAL,
SINGLE_ACK,
TCP_GIT_PORT,
ZERO_SHA,
ack_type,
extract_capabilities,
extract_want_line_capabilities,
symref_capabilities,
)
from dulwich.refs import (
ANNOTATED_TAG_SUFFIX,
write_info_refs,
)
from dulwich.repo import (
Repo,
)
logger = log_utils.getLogger(__name__)
class Backend(object):
"""A backend for the Git smart server implementation."""
def open_repository(self, path):
"""Open the repository at a path.
Args:
path: Path to the repository
Raises:
NotGitRepository: no git repository was found at path
Returns: Instance of BackendRepo
"""
raise NotImplementedError(self.open_repository)
class BackendRepo(object):
"""Repository abstraction used by the Git server.
The methods required here are a subset of those provided by
dulwich.repo.Repo.
"""
object_store = None
refs = None
def get_refs(self):
"""
Get all the refs in the repository
Returns: dict of name -> sha
"""
raise NotImplementedError
def get_peeled(self, name):
"""Return the cached peeled value of a ref, if available.
Args:
name: Name of the ref to peel
Returns: The peeled value of the ref. If the ref is known not to point to
a tag, this will be the SHA the ref refers to. If no cached
information about a tag is available, this method may return None,
but it should attempt to peel the tag if possible.
"""
return None
def fetch_objects(self, determine_wants, graph_walker, progress,
get_tagged=None):
"""
Yield the objects required for a list of commits.
Args:
progress: is a callback to send progress messages to the client
get_tagged: Function that returns a dict of pointed-to sha ->
tag sha for including tags.
"""
raise NotImplementedError
class DictBackend(Backend):
"""Trivial backend that looks up Git repositories in a dictionary."""
def __init__(self, repos):
self.repos = repos
def open_repository(self, path):
logger.debug('Opening repository at %s', path)
try:
return self.repos[path]
except KeyError:
raise NotGitRepository(
"No git repository was found at %(path)s" % dict(path=path)
)
class FileSystemBackend(Backend):
"""Simple backend looking up Git repositories in the local file system."""
def __init__(self, root=os.sep):
super(FileSystemBackend, self).__init__()
self.root = (os.path.abspath(root) + os.sep).replace(
os.sep * 2, os.sep)
def open_repository(self, path):
logger.debug('opening repository at %s', path)
abspath = os.path.abspath(os.path.join(self.root, path)) + os.sep
normcase_abspath = os.path.normcase(abspath)
normcase_root = os.path.normcase(self.root)
if not normcase_abspath.startswith(normcase_root):
raise NotGitRepository(
"Path %r not inside root %r" %
(path, self.root))
return Repo(abspath)
class Handler(object):
"""Smart protocol command handler base class."""
def __init__(self, backend, proto, http_req=None):
self.backend = backend
self.proto = proto
self.http_req = http_req
def handle(self):
raise NotImplementedError(self.handle)
class PackHandler(Handler):
"""Protocol handler for packs."""
def __init__(self, backend, proto, http_req=None):
super(PackHandler, self).__init__(backend, proto, http_req)
self._client_capabilities = None
# Flags needed for the no-done capability
self._done_received = False
@classmethod
def capability_line(cls, capabilities):
logger.info('Sending capabilities: %s', capabilities)
return b"".join([b" " + c for c in capabilities])
@classmethod
def capabilities(cls):
raise NotImplementedError(cls.capabilities)
@classmethod
def innocuous_capabilities(cls):
return [CAPABILITY_INCLUDE_TAG, CAPABILITY_THIN_PACK,
CAPABILITY_NO_PROGRESS, CAPABILITY_OFS_DELTA,
capability_agent()]
@classmethod
def required_capabilities(cls):
"""Return a list of capabilities that we require the client to have."""
return []
def set_client_capabilities(self, caps):
allowable_caps = set(self.innocuous_capabilities())
allowable_caps.update(self.capabilities())
for cap in caps:
if cap not in allowable_caps:
raise GitProtocolError('Client asked for capability %s that '
'was not advertised.' % cap)
for cap in self.required_capabilities():
if cap not in caps:
raise GitProtocolError('Client does not support required '
'capability %s.' % cap)
self._client_capabilities = set(caps)
logger.info('Client capabilities: %s', caps)
def has_capability(self, cap):
if self._client_capabilities is None:
raise GitProtocolError('Server attempted to access capability %s '
'before asking client' % cap)
return cap in self._client_capabilities
def notify_done(self):
self._done_received = True
class UploadPackHandler(PackHandler):
"""Protocol handler for uploading a pack to the client."""
def __init__(self, backend, args, proto, http_req=None,
advertise_refs=False):
super(UploadPackHandler, self).__init__(
backend, proto, http_req=http_req)
self.repo = backend.open_repository(args[0])
self._graph_walker = None
self.advertise_refs = advertise_refs
# A state variable for denoting that the have list is still
# being processed, and the client is not accepting any other
# data (such as side-band, see the progress method here).
self._processing_have_lines = False
@classmethod
def capabilities(cls):
return [CAPABILITY_MULTI_ACK_DETAILED, CAPABILITY_MULTI_ACK,
CAPABILITY_SIDE_BAND_64K, CAPABILITY_THIN_PACK,
CAPABILITY_OFS_DELTA, CAPABILITY_NO_PROGRESS,
CAPABILITY_INCLUDE_TAG, CAPABILITY_SHALLOW, CAPABILITY_NO_DONE]
@classmethod
def required_capabilities(cls):
return (CAPABILITY_SIDE_BAND_64K, CAPABILITY_THIN_PACK,
CAPABILITY_OFS_DELTA)
def progress(self, message):
if (self.has_capability(CAPABILITY_NO_PROGRESS) or
self._processing_have_lines):
return
self.proto.write_sideband(SIDE_BAND_CHANNEL_PROGRESS, message)
def get_tagged(self, refs=None, repo=None):
"""Get a dict of peeled values of tags to their original tag shas.
Args:
refs: dict of refname -> sha of possible tags; defaults to all
of the backend's refs.
repo: optional Repo instance for getting peeled refs; defaults
to the backend's repo, if available
Returns: dict of peeled_sha -> tag_sha, where tag_sha is the sha of a
tag whose peeled value is peeled_sha.
"""
if not self.has_capability(CAPABILITY_INCLUDE_TAG):
return {}
if refs is None:
refs = self.repo.get_refs()
if repo is None:
repo = getattr(self.repo, "repo", None)
if repo is None:
# Bail if we don't have a Repo available; this is ok since
# clients must be able to handle if the server doesn't include
# all relevant tags.
# TODO: fix behavior when missing
return {}
# TODO(jelmer): Integrate this with the refs logic in
# Repo.fetch_objects
tagged = {}
for name, sha in refs.items():
peeled_sha = repo.get_peeled(name)
if peeled_sha != sha:
tagged[peeled_sha] = sha
return tagged
def handle(self):
def write(x):
return self.proto.write_sideband(SIDE_BAND_CHANNEL_DATA, x)
graph_walker = _ProtocolGraphWalker(
self, self.repo.object_store, self.repo.get_peeled,
self.repo.refs.get_symrefs)
objects_iter = self.repo.fetch_objects(
graph_walker.determine_wants, graph_walker, self.progress,
get_tagged=self.get_tagged)
# Note the fact that client is only processing responses related
# to the have lines it sent, and any other data (including side-
band) will be considered a fatal error.
self._processing_have_lines = True
# Did the process short-circuit (e.g. in a stateless RPC call)? Note
# that the client still expects a 0-object pack in most cases.
# Also, if it also happens that the object_iter is instantiated
# with a graph walker with an implementation that talks over the
# wire (which is this instance of this class) this will actually
# iterate through everything and write things out to the wire.
if len(objects_iter) == 0:
return
# The provided haves are processed, and it is safe to send side-
# band data now.
self._processing_have_lines = False
if not graph_walker.handle_done(
not self.has_capability(CAPABILITY_NO_DONE),
self._done_received):
return
self.progress(
("counting objects: %d, done.\n" % len(objects_iter)).encode(
'ascii'))
write_pack_objects(ProtocolFile(None, write), objects_iter)
# we are done
self.proto.write_pkt_line(None)
def _split_proto_line(line, allowed):
"""Split a line read from the wire.
Args:
line: The line read from the wire.
allowed: An iterable of command names that should be allowed.
Command names not listed below as possible return values will be
ignored. If None, any commands from the possible return values are
allowed.
Returns: a tuple having one of the following forms:
('want', obj_id)
('have', obj_id)
('done', None)
(None, None) (for a flush-pkt)
Raises:
UnexpectedCommandError: if the line cannot be parsed into one of the
allowed return values.
"""
if not line:
fields = [None]
else:
fields = line.rstrip(b'\n').split(b' ', 1)
command = fields[0]
if allowed is not None and command not in allowed:
raise UnexpectedCommandError(command)
if len(fields) == 1 and command in (COMMAND_DONE, None):
return (command, None)
elif len(fields) == 2:
if command in (COMMAND_WANT, COMMAND_HAVE, COMMAND_SHALLOW,
COMMAND_UNSHALLOW):
if not valid_hexsha(fields[1]):
raise GitProtocolError("Invalid sha")
return tuple(fields)
elif command == COMMAND_DEEPEN:
return command, int(fields[1])
raise GitProtocolError('Received invalid line from client: %r' % line)
def _find_shallow(store, heads, depth):
"""Find shallow commits according to a given depth.
Args:
store: An ObjectStore for looking up objects.
heads: Iterable of head SHAs to start walking from.
depth: The depth of ancestors to include. A depth of one includes
only the heads themselves.
Returns: A tuple of (shallow, not_shallow), sets of SHAs that should be
considered shallow and unshallow according to the arguments. Note that
these sets may overlap if a commit is reachable along multiple paths.
"""
parents = {}
def get_parents(sha):
result = parents.get(sha, None)
if not result:
result = store[sha].parents
parents[sha] = result
return result
todo = [] # stack of (sha, depth)
for head_sha in heads:
obj = store.peel_sha(head_sha)
if isinstance(obj, Commit):
todo.append((obj.id, 1))
not_shallow = set()
shallow = set()
while todo:
sha, cur_depth = todo.pop()
if cur_depth < depth:
not_shallow.add(sha)
new_depth = cur_depth + 1
todo.extend((p, new_depth) for p in get_parents(sha))
else:
shallow.add(sha)
return shallow, not_shallow
def _want_satisfied(store, haves, want, earliest):
o = store[want]
pending = collections.deque([o])
known = set([want])
while pending:
commit = pending.popleft()
if commit.id in haves:
return True
if commit.type_name != b"commit":
# non-commit wants are assumed to be satisfied
continue
for parent in commit.parents:
if parent in known:
continue
known.add(parent)
parent_obj = store[parent]
# TODO: handle parents with later commit times than children
if parent_obj.commit_time >= earliest:
pending.append(parent_obj)
return False
def _all_wants_satisfied(store, haves, wants):
"""Check whether all the current wants are satisfied by a set of haves.
Args:
store: Object store to retrieve objects from
haves: A set of commits we know the client has.
wants: A set of commits the client wants
Returns: True if each want is satisfied, i.e. some have is equal to
or an ancestor of it; False otherwise.
"""
haves = set(haves)
if haves:
earliest = min([store[h].commit_time for h in haves])
else:
earliest = 0
for want in wants:
if not _want_satisfied(store, haves, want, earliest):
return False
return True
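# Usage sketch (hypothetical names): given the haves a client reported
# and the wants it asked for,
#
#     if _all_wants_satisfied(object_store, haves, wants):
#         ...  # a pack built against these haves will be connected
#
# The oldest commit time among the haves bounds the ancestry walk done
# by _want_satisfied.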
class _ProtocolGraphWalker(object):
"""A graph walker that knows the git protocol.
As a graph walker, this class implements ack(), next(), and reset(). It
also contains some base methods for interacting with the wire and walking
the commit tree.
The work of determining which acks to send is passed on to the
implementation instance stored in _impl. The reason for this is that we do
not know at object creation time what ack level the protocol requires. A
call to set_ack_type() is required to set up the implementation, before
any calls to next() or ack() are made.
"""
def __init__(self, handler, object_store, get_peeled, get_symrefs):
self.handler = handler
self.store = object_store
self.get_peeled = get_peeled
self.get_symrefs = get_symrefs
self.proto = handler.proto
self.http_req = handler.http_req
self.advertise_refs = handler.advertise_refs
self._wants = []
self.shallow = set()
self.client_shallow = set()
self.unshallow = set()
self._cached = False
self._cache = []
self._cache_index = 0
self._impl = None
def determine_wants(self, heads):
"""Determine the wants for a set of heads.
The given heads are advertised to the client, which then specifies which
refs it wants using 'want' lines. This portion of the protocol is the
same regardless of ack type, and in fact is used to set the ack type of
the ProtocolGraphWalker.
If the client has the 'shallow' capability, this method also reads and
responds to the 'shallow' and 'deepen' lines from the client. These are
not part of the wants per se, but they set up necessary state for
walking the graph. Additionally, later code depends on this method
consuming everything up to the first 'have' line.
Args:
heads: a dict of refname->SHA1 to advertise
Returns: a list of SHA1s requested by the client
"""
symrefs = self.get_symrefs()
values = set(heads.values())
if self.advertise_refs or not self.http_req:
for i, (ref, sha) in enumerate(sorted(heads.items())):
try:
peeled_sha = self.get_peeled(ref)
except KeyError:
# Skip refs that are inaccessible
# TODO(jelmer): Integrate with Repo.fetch_objects refs
# logic.
continue
line = sha + b' ' + ref
if not i:
line += (b'\x00' +
self.handler.capability_line(
self.handler.capabilities() +
symref_capabilities(symrefs.items())))
self.proto.write_pkt_line(line + b'\n')
if peeled_sha != sha:
self.proto.write_pkt_line(
peeled_sha + b' ' + ref + ANNOTATED_TAG_SUFFIX + b'\n')
# end of the ref advertisement; send a flush-pkt
self.proto.write_pkt_line(None)
if self.advertise_refs:
return []
# The client will now send 'want' lines
want = self.proto.read_pkt_line()
if not want:
return []
line, caps = extract_want_line_capabilities(want)
self.handler.set_client_capabilities(caps)
self.set_ack_type(ack_type(caps))
allowed = (COMMAND_WANT, COMMAND_SHALLOW, COMMAND_DEEPEN, None)
command, sha = _split_proto_line(line, allowed)
want_revs = []
while command == COMMAND_WANT:
if sha not in values:
raise GitProtocolError(
'Client wants invalid object %s' % sha)
want_revs.append(sha)
command, sha = self.read_proto_line(allowed)
self.set_wants(want_revs)
if command in (COMMAND_SHALLOW, COMMAND_DEEPEN):
self.unread_proto_line(command, sha)
self._handle_shallow_request(want_revs)
if self.http_req and self.proto.eof():
# The client may close the socket at this point, expecting a
# flush-pkt from the server. We might be ready to send a packfile
# at this point, so we need to explicitly short-circuit in this
# case.
return []
return want_revs
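# Non-normative sketch of the exchange this method implements, as
# pkt-line payloads (S = server, C = client):
#
#     S: <sha1> refs/heads/master\x00<capabilities>\n
#     S: <sha1> refs/tags/v1.0\n
#     S: <peeled-sha1> refs/tags/v1.0^{}\n
#     S: flush-pkt
#     C: want <sha1> <capabilities>\n
#     C: want <sha1>\n
#     C: (optional 'shallow'/'deepen' lines)
#     C: flush-pkt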
def unread_proto_line(self, command, value):
if isinstance(value, int):
value = str(value).encode('ascii')
self.proto.unread_pkt_line(command + b' ' + value)
def ack(self, have_ref):
if len(have_ref) != 40:
raise ValueError("invalid sha %r" % have_ref)
return self._impl.ack(have_ref)
def reset(self):
self._cached = True
self._cache_index = 0
def next(self):
if not self._cached:
if not self._impl and self.http_req:
return None
return next(self._impl)
self._cache_index += 1
if self._cache_index > len(self._cache):
return None
return self._cache[self._cache_index]
__next__ = next
def read_proto_line(self, allowed):
"""Read a line from the wire.
Args:
allowed: An iterable of command names that should be allowed.
Returns: A tuple of (command, value); see _split_proto_line.
Raises:
UnexpectedCommandError: If the line contains a command not in allowed.
"""
return _split_proto_line(self.proto.read_pkt_line(), allowed)
def _handle_shallow_request(self, wants):
while True:
command, val = self.read_proto_line(
(COMMAND_DEEPEN, COMMAND_SHALLOW))
if command == COMMAND_DEEPEN:
depth = val
break
self.client_shallow.add(val)
self.read_proto_line((None,)) # consume client's flush-pkt
shallow, not_shallow = _find_shallow(self.store, wants, depth)
# Update self.shallow instead of reassigning it since we passed a
# reference to it before this method was called.
self.shallow.update(shallow - not_shallow)
new_shallow = self.shallow - self.client_shallow
unshallow = self.unshallow = not_shallow & self.client_shallow
for sha in sorted(new_shallow):
self.proto.write_pkt_line(COMMAND_SHALLOW + b' ' + sha)
for sha in sorted(unshallow):
self.proto.write_pkt_line(COMMAND_UNSHALLOW + b' ' + sha)
self.proto.write_pkt_line(None)
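# Non-normative sketch of the shallow negotiation consumed and answered
# above (S = server, C = client):
#
#     C: shallow <sha1>\n      (zero or more: commits the client already
#                               has as shallow)
#     C: deepen <depth>\n
#     C: flush-pkt
#     S: shallow <sha1>\n      (new shallow boundaries)
#     S: unshallow <sha1>\n    (previously shallow commits now complete)
#     S: flush-pkt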
def notify_done(self):
# relay the message down to the handler.
self.handler.notify_done()
def send_ack(self, sha, ack_type=b''):
if ack_type:
ack_type = b' ' + ack_type
self.proto.write_pkt_line(b'ACK ' + sha + ack_type + b'\n')
def send_nak(self):
self.proto.write_pkt_line(b'NAK\n')
def handle_done(self, done_required, done_received):
# Delegate this to the implementation.
return self._impl.handle_done(done_required, done_received)
def set_wants(self, wants):
self._wants = wants
def all_wants_satisfied(self, haves):
"""Check whether all the current wants are satisfied by a set of haves.
Args:
haves: A set of commits we know the client has.
Note: Wants are specified with set_wants rather than passed in since
in the current interface they are determined outside this class.
"""
return _all_wants_satisfied(self.store, haves, self._wants)
def set_ack_type(self, ack_type):
impl_classes = {
MULTI_ACK: MultiAckGraphWalkerImpl,
MULTI_ACK_DETAILED: MultiAckDetailedGraphWalkerImpl,
SINGLE_ACK: SingleAckGraphWalkerImpl,
}
self._impl = impl_classes[ack_type](self)
_GRAPH_WALKER_COMMANDS = (COMMAND_HAVE, COMMAND_DONE, None)
class SingleAckGraphWalkerImpl(object):
"""Graph walker implementation that speaks the single-ack protocol."""
def __init__(self, walker):
self.walker = walker
self._common = []
def ack(self, have_ref):
if not self._common:
self.walker.send_ack(have_ref)
self._common.append(have_ref)
def next(self):
command, sha = self.walker.read_proto_line(_GRAPH_WALKER_COMMANDS)
if command in (None, COMMAND_DONE):
# defer the handling of done
self.walker.notify_done()
return None
elif command == COMMAND_HAVE:
return sha
__next__ = next
def handle_done(self, done_required, done_received):
if not self._common:
self.walker.send_nak()
if done_required and not done_received:
# 'done' is required but was not received; skip the pack for
# this request and do not treat the negotiation as done.
return False
if not done_received and not self._common:
# No 'done' was received and the walker picked up no haves, so
# we are not actually done. This is usually triggered when the
# client pulls from a source with no common base commit.
# See: test_server.MultiAckDetailedGraphWalkerImplTestCase.\
# test_multi_ack_stateless_nodone
return False
return True
class MultiAckGraphWalkerImpl(object):
"""Graph walker implementation that speaks the multi-ack protocol."""
def __init__(self, walker):
self.walker = walker
self._found_base = False
self._common = []
def ack(self, have_ref):
self._common.append(have_ref)
if not self._found_base:
self.walker.send_ack(have_ref, b'continue')
if self.walker.all_wants_satisfied(self._common):
self._found_base = True
# else we blind ack within next
def next(self):
while True:
command, sha = self.walker.read_proto_line(_GRAPH_WALKER_COMMANDS)
if command is None:
self.walker.send_nak()
# in multi-ack mode, a flush-pkt indicates the client wants to
# flush but more have lines are still coming
continue
elif command == COMMAND_DONE:
self.walker.notify_done()
return None
elif command == COMMAND_HAVE:
if self._found_base:
# blind ack
self.walker.send_ack(sha, b'continue')
return sha
__next__ = next
def handle_done(self, done_required, done_received):
if done_required and not done_received:
# 'done' is required but was not received; skip the pack for
# this request and do not treat the negotiation as done.
return False
if not done_received and not self._common:
# No 'done' was received and the walker picked up no haves, so
# we are not actually done. This is usually triggered when the
# client pulls from a source with no common base commit.
# See: test_server.MultiAckDetailedGraphWalkerImplTestCase.\
# test_multi_ack_stateless_nodone
return False
# don't nak unless no common commits were found, even if not
# everything is satisfied
if self._common:
self.walker.send_ack(self._common[-1])
else:
self.walker.send_nak()
return True
class MultiAckDetailedGraphWalkerImpl(object):
"""Graph walker implementation speaking the multi-ack-detailed protocol."""
def __init__(self, walker):
self.walker = walker
self._common = []
def ack(self, have_ref):
# Should only be called when have_ref is common
self._common.append(have_ref)
self.walker.send_ack(have_ref, b'common')
def next(self):
while True:
command, sha = self.walker.read_proto_line(_GRAPH_WALKER_COMMANDS)
if command is None:
if self.walker.all_wants_satisfied(self._common):
self.walker.send_ack(self._common[-1], b'ready')
self.walker.send_nak()
if self.walker.http_req:
# In the HTTP (stateless) version of this request, a
# flush-pkt always signifies the end of the request, so we
# also return None here as if we were done. Whether we
# really are depends on whether the no-done capability was
# specified; that is resolved later in handle_done.
return None
elif command == COMMAND_DONE:
# Let the walker know that we got a done.
self.walker.notify_done()
break
elif command == COMMAND_HAVE:
# return the sha and let the caller ACK it with the
# above ack method.
return sha
# don't nak unless no common commits were found, even if not
# everything is satisfied
__next__ = next
def handle_done(self, done_required, done_received):
if done_required and not done_received:
# 'done' is required but was not received; skip the pack for
# this request and do not treat the negotiation as done.
return False
if not done_received and not self._common:
# No 'done' was received and the walker picked up no haves, so
# we are not actually done. This is usually triggered when the
# client pulls from a source with no common base commit.
# See: test_server.MultiAckDetailedGraphWalkerImplTestCase.\
# test_multi_ack_stateless_nodone
return False
# don't nak unless no common commits were found, even if not
# everything is satisfied
if self._common:
self.walker.send_ack(self._common[-1])
else:
self.walker.send_nak()
return True
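# Non-normative sketch of a multi_ack_detailed negotiation as driven by
# the implementation above (S = server, C = client):
#
#     C: have <sha1>\n           (sent in batches)
#     S: ACK <sha1> common\n     (for each have the server also has)
#     S: ACK <sha1> ready\n      (once the wants are fully satisfied)
#     S: NAK\n                   (at each client flush-pkt)
#     C: done\n
#     S: ACK <sha1>\n            (final ACK, or NAK if nothing in common)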
class ReceivePackHandler(PackHandler):
"""Protocol handler for downloading a pack from the client."""
def __init__(self, backend, args, proto, http_req=None,
advertise_refs=False):
super(ReceivePackHandler, self).__init__(
backend, proto, http_req=http_req)
self.repo = backend.open_repository(args[0])
self.advertise_refs = advertise_refs
@classmethod
def capabilities(cls):
return [CAPABILITY_REPORT_STATUS, CAPABILITY_DELETE_REFS,
CAPABILITY_QUIET, CAPABILITY_OFS_DELTA,
CAPABILITY_SIDE_BAND_64K, CAPABILITY_NO_DONE]
def _apply_pack(self, refs):
all_exceptions = (IOError, OSError, ChecksumMismatch, ApplyDeltaError,
AssertionError, socket.error, zlib.error,
ObjectFormatException)
status = []
will_send_pack = False
for command in refs:
if command[1] != ZERO_SHA:
will_send_pack = True
if will_send_pack:
# TODO: more informative error messages than just the exception
# string
try:
recv = getattr(self.proto, "recv", None)
self.repo.object_store.add_thin_pack(self.proto.read, recv)
status.append((b'unpack', b'ok'))
except all_exceptions as e:
status.append(
(b'unpack', str(e).replace('\n', '').encode('utf-8')))
# The pack may still have been moved in, but it may contain
# broken objects. We trust a later GC to clean it up.
else:
# The git protocol expects an 'unpack' status entry even if no
# pack data has been sent.
status.append((b'unpack', b'ok'))
for oldsha, sha, ref in refs:
ref_status = b'ok'
try:
if sha == ZERO_SHA:
if CAPABILITY_DELETE_REFS not in self.capabilities():
raise GitProtocolError(
'Attempted to delete refs without delete-refs '
'capability.')
try:
self.repo.refs.remove_if_equals(ref, oldsha)
except all_exceptions:
ref_status = b'failed to delete'
else:
try:
self.repo.refs.set_if_equals(ref, oldsha, sha)
except all_exceptions:
ref_status = b'failed to write'
except KeyError:
ref_status = b'bad ref'
status.append((ref, ref_status))
return status
def _report_status(self, status):
if self.has_capability(CAPABILITY_SIDE_BAND_64K):
writer = BufferedPktLineWriter(
lambda d: self.proto.write_sideband(SIDE_BAND_CHANNEL_DATA, d))
write = writer.write
def flush():
writer.flush()
self.proto.write_pkt_line(None)
else:
write = self.proto.write_pkt_line
def flush():
pass
for name, msg in status:
if name == b'unpack':
write(b'unpack ' + msg + b'\n')
elif msg == b'ok':
write(b'ok ' + name + b'\n')
else:
write(b'ng ' + name + b' ' + msg + b'\n')
write(None)
flush()
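# The resulting report-status stream looks roughly like this, one
# pkt-line per entry (wrapped in side-band-64k data packets when that
# capability was negotiated):
#
#     unpack ok
#     ok refs/heads/master
#     ng refs/heads/broken failed to write
#     flush-pkt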
+ def _on_post_receive(self, client_refs):
+ hook = self.repo.hooks.get('post-receive', None)
+ if not hook:
+ return
+ try:
+ output = hook.execute(client_refs)
+ if output:
+ self.proto.write_sideband(SIDE_BAND_CHANNEL_PROGRESS, output)
+ except HookError as err:
+ self.proto.write_sideband(
+ SIDE_BAND_CHANNEL_FATAL, repr(err).encode('utf-8'))
+
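+ # Note: the post-receive hook object is looked up in repo.hooks; any
+ # object whose execute() returns optional output and raises HookError
+ # on failure will do, e.g. (hypothetically):
+ #     repo.hooks['post-receive'] = my_post_receive_hook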
def handle(self):
if self.advertise_refs or not self.http_req:
refs = sorted(self.repo.get_refs().items())
symrefs = sorted(self.repo.refs.get_symrefs().items())
if not refs:
refs = [(CAPABILITIES_REF, ZERO_SHA)]
self.proto.write_pkt_line(
refs[0][1] + b' ' + refs[0][0] + b'\0' +
self.capability_line(
self.capabilities() + symref_capabilities(symrefs)) + b'\n')
for i in range(1, len(refs)):
ref = refs[i]
self.proto.write_pkt_line(ref[1] + b' ' + ref[0] + b'\n')
self.proto.write_pkt_line(None)
if self.advertise_refs:
return
client_refs = []
ref = self.proto.read_pkt_line()
# if ref is None then the client doesn't want to send us anything
if ref is None:
return
ref, caps = extract_capabilities(ref)
self.set_client_capabilities(caps)
# client will now send us a list of (oldsha, newsha, ref)
while ref:
client_refs.append(ref.split())
ref = self.proto.read_pkt_line()
# the backend can now deal with these refs and read a pack using self.read
status = self._apply_pack(client_refs)
+ self._on_post_receive(client_refs)
+
# when we have read all the pack from the client, send a status report
# if the client asked for it
if self.has_capability(CAPABILITY_REPORT_STATUS):
self._report_status(status)
class UploadArchiveHandler(Handler):
def __init__(self, backend, args, proto, http_req=None):
super(UploadArchiveHandler, self).__init__(backend, proto, http_req)
self.repo = backend.open_repository(args[0])
def handle(self):
def write(x):
return self.proto.write_sideband(SIDE_BAND_CHANNEL_DATA, x)
arguments = []
for pkt in self.proto.read_pkt_seq():
(key, value) = pkt.split(b' ', 1)
if key != b'argument':
raise GitProtocolError('unknown command %s' % key)
arguments.append(value.rstrip(b'\n'))
prefix = b''
format = 'tar'
i = 0
store = self.repo.object_store
while i < len(arguments):
argument = arguments[i]
if argument == b'--prefix':
i += 1
prefix = arguments[i]
elif argument == b'--format':
i += 1
format = arguments[i].decode('ascii')
else:
commit_sha = self.repo.refs[argument]
tree = store[store[commit_sha].tree]
i += 1
self.proto.write_pkt_line(b'ACK\n')
self.proto.write_pkt_line(None)
for chunk in tar_stream(
store, tree, mtime=time.time(), prefix=prefix, format=format):
write(chunk)
self.proto.write_pkt_line(None)
# Default handler classes for git services.
DEFAULT_HANDLERS = {
b'git-upload-pack': UploadPackHandler,
b'git-receive-pack': ReceivePackHandler,
b'git-upload-archive': UploadArchiveHandler,
}
class TCPGitRequestHandler(SocketServer.StreamRequestHandler):
def __init__(self, handlers, *args, **kwargs):
self.handlers = handlers
SocketServer.StreamRequestHandler.__init__(self, *args, **kwargs)
def handle(self):
proto = ReceivableProtocol(self.connection.recv, self.wfile.write)
command, args = proto.read_cmd()
logger.info('Handling %s request, args=%s', command, args)
cls = self.handlers.get(command, None)
if not callable(cls):
raise GitProtocolError('Invalid service %s' % command)
h = cls(self.server.backend, args, proto)
h.handle()
class TCPGitServer(SocketServer.TCPServer):
allow_reuse_address = True
serve = SocketServer.TCPServer.serve_forever
def _make_handler(self, *args, **kwargs):
return TCPGitRequestHandler(self.handlers, *args, **kwargs)
def __init__(self, backend, listen_addr, port=TCP_GIT_PORT, handlers=None):
self.handlers = dict(DEFAULT_HANDLERS)
if handlers is not None:
self.handlers.update(handlers)
self.backend = backend
logger.info('Listening for TCP connections on %s:%d',
listen_addr, port)
SocketServer.TCPServer.__init__(self, (listen_addr, port),
self._make_handler)
def verify_request(self, request, client_address):
logger.info('Handling request from %s', client_address)
return True
def handle_error(self, request, client_address):
logger.exception('Exception happened during processing of request '
'from %s', client_address)
def main(argv=sys.argv):
"""Entry point for starting a TCP git server."""
import optparse
parser = optparse.OptionParser()
parser.add_option("-l", "--listen_address", dest="listen_address",
default="localhost",
help="Binding IP address.")
parser.add_option("-p", "--port", dest="port", type=int,
default=TCP_GIT_PORT,
help="Binding TCP port.")
options, args = parser.parse_args(argv)
log_utils.default_logging_config()
if len(args) > 1:
gitdir = args[1]
else:
gitdir = '.'
# TODO(jelmer): Support git-daemon-export-ok and --export-all.
backend = FileSystemBackend(gitdir)
server = TCPGitServer(backend, options.listen_address, options.port)
server.serve_forever()
def serve_command(handler_cls, argv=sys.argv, backend=None, inf=sys.stdin,
outf=sys.stdout):
"""Serve a single command.
This is mostly useful for the implementation of commands used by e.g.
git+ssh.
Args:
handler_cls: `Handler` class to use for the request
argv: execv-style command-line arguments. Defaults to sys.argv.
backend: `Backend` to use
inf: File-like object to read from, defaults to standard input.
outf: File-like object to write to, defaults to standard output.
Returns: Exit code for use with sys.exit. 0 on success, 1 on failure.
"""
if backend is None:
backend = FileSystemBackend()
def send_fn(data):
outf.write(data)
outf.flush()
proto = Protocol(inf.read, send_fn)
handler = handler_cls(backend, argv[1:], proto)
# FIXME: Catch exceptions and write a single-line summary to outf.
handler.handle()
return 0
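# Usage sketch (hypothetical argv, mirroring the git-upload-pack style
# entry points):
#
#     exit_code = serve_command(
#         UploadPackHandler, argv=['upload-pack', '/path/to/repo'])
#     sys.exit(exit_code)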
def generate_info_refs(repo):
"""Generate an info refs file."""
refs = repo.get_refs()
return write_info_refs(refs, repo.object_store)
def generate_objects_info_packs(repo):
"""Generate an index for for packs."""
for pack in repo.object_store.packs:
yield (
b'P ' + pack.data.filename.encode(sys.getfilesystemencoding()) +
b'\n')
def update_server_info(repo):
"""Generate server info for dumb file access.
This generates info/refs and objects/info/packs,
similar to "git update-server-info".
"""
repo._put_named_file(
os.path.join('info', 'refs'),
b"".join(generate_info_refs(repo)))
repo._put_named_file(
os.path.join('objects', 'info', 'packs'),
b"".join(generate_objects_info_packs(repo)))
if __name__ == '__main__':
main()