back to SWE-Agent summary
SWE-Agent: dulwich
Pytest Summary for test tests
status |
count |
skipped |
4 |
failed |
88 |
passed |
2 |
total |
94 |
collected |
94 |
Failed pytests:
test_file.py::FancyRenameTests::test_dest_exists
test_file.py::FancyRenameTests::test_dest_exists
self =
def test_dest_exists(self):
self.create(self.bar, b"bar contents")
_fancy_rename(self.foo, self.bar)
> self.assertFalse(os.path.exists(self.foo))
E AssertionError: True is not false
tests/test_file.py:64: AssertionError
test_file.py::FancyRenameTests::test_no_dest_exists
test_file.py::FancyRenameTests::test_no_dest_exists
self =
def test_no_dest_exists(self):
self.assertFalse(os.path.exists(self.bar))
_fancy_rename(self.foo, self.bar)
> self.assertFalse(os.path.exists(self.foo))
E AssertionError: True is not false
tests/test_file.py:55: AssertionError
test_file.py::GitFileTests::test_abort
test_file.py::GitFileTests::test_abort
self =
def test_abort(self):
foo = self.path("foo")
foo_lock = f"{foo}.lock"
orig_f = open(foo, "rb")
self.assertEqual(orig_f.read(), b"foo contents")
orig_f.close()
f = GitFile(foo, "wb")
> f.write(b"new contents")
E AttributeError: 'NoneType' object has no attribute 'write'
tests/test_file.py:177: AttributeError
test_file.py::GitFileTests::test_abort_close
test_file.py::GitFileTests::test_abort_close
self =
def test_abort_close(self):
foo = self.path("foo")
f = GitFile(foo, "wb")
> f.abort()
E AttributeError: 'NoneType' object has no attribute 'abort'
tests/test_file.py:189: AttributeError
test_file.py::GitFileTests::test_abort_close_removed
test_file.py::GitFileTests::test_abort_close_removed
self =
def test_abort_close_removed(self):
foo = self.path("foo")
f = GitFile(foo, "wb")
> f._file.close()
E AttributeError: 'NoneType' object has no attribute '_file'
tests/test_file.py:206: AttributeError
test_file.py::GitFileTests::test_default_mode
test_file.py::GitFileTests::test_default_mode
self =
def test_default_mode(self):
f = GitFile(self.path("foo"))
> self.assertEqual(b"foo contents", f.read())
E AttributeError: 'NoneType' object has no attribute 'read'
tests/test_file.py:122: AttributeError
test_file.py::GitFileTests::test_invalid
test_file.py::GitFileTests::test_invalid
self =
def test_invalid(self):
foo = self.path("foo")
> self.assertRaises(IOError, GitFile, foo, mode="r")
E AssertionError: OSError not raised by GitFile
tests/test_file.py:105: AssertionError
test_file.py::GitFileTests::test_open_twice
test_file.py::GitFileTests::test_open_twice
self =
def test_open_twice(self):
foo = self.path("foo")
f1 = GitFile(foo, "wb")
> f1.write(b"new")
E AttributeError: 'NoneType' object has no attribute 'write'
tests/test_file.py:152: AttributeError
test_file.py::GitFileTests::test_readonly
test_file.py::GitFileTests::test_readonly
self =
def test_readonly(self):
f = GitFile(self.path("foo"), "rb")
> self.assertIsInstance(f, io.IOBase)
E AssertionError: None is not an instance of
tests/test_file.py:113: AssertionError
test_file.py::GitFileTests::test_write
test_file.py::GitFileTests::test_write
self =
def test_write(self):
foo = self.path("foo")
foo_lock = f"{foo}.lock"
orig_f = open(foo, "rb")
self.assertEqual(orig_f.read(), b"foo contents")
orig_f.close()
self.assertFalse(os.path.exists(foo_lock))
f = GitFile(foo, "wb")
> self.assertFalse(f.closed)
E AttributeError: 'NoneType' object has no attribute 'closed'
tests/test_file.py:135: AttributeError
test_hooks.py::ShellHookTests::test_hook_commit_msg
test_hooks.py::ShellHookTests::test_hook_commit_msg
self =
def test_hook_commit_msg(self):
repo_dir = os.path.join(tempfile.mkdtemp())
os.mkdir(os.path.join(repo_dir, "hooks"))
self.addCleanup(shutil.rmtree, repo_dir)
commit_msg_fail = """#!/bin/sh
exit 1
"""
commit_msg_success = """#!/bin/sh
exit 0
"""
commit_msg_cwd = (
"""#!/bin/sh
if [ "$(pwd)" = '"""
+ repo_dir
+ "' ]; then exit 0; else exit 1; fi\n"
)
commit_msg = os.path.join(repo_dir, "hooks", "commit-msg")
hook = CommitMsgShellHook(repo_dir)
with open(commit_msg, "w") as f:
f.write(commit_msg_fail)
os.chmod(commit_msg, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC)
> self.assertRaises(errors.HookError, hook.execute, b"failed commit")
E AssertionError: HookError not raised by execute
tests/test_hooks.py:119: AssertionError
test_hooks.py::ShellHookTests::test_hook_post_commit
test_hooks.py::ShellHookTests::test_hook_post_commit
self =
def test_hook_post_commit(self):
(fd, path) = tempfile.mkstemp()
os.close(fd)
repo_dir = os.path.join(tempfile.mkdtemp())
os.mkdir(os.path.join(repo_dir, "hooks"))
self.addCleanup(shutil.rmtree, repo_dir)
post_commit_success = (
"""#!/bin/sh
rm """
+ path
+ "\n"
)
post_commit_fail = """#!/bin/sh
exit 1
"""
post_commit_cwd = (
"""#!/bin/sh
if [ "$(pwd)" = '"""
+ repo_dir
+ "' ]; then exit 0; else exit 1; fi\n"
)
post_commit = os.path.join(repo_dir, "hooks", "post-commit")
hook = PostCommitShellHook(repo_dir)
with open(post_commit, "w") as f:
f.write(post_commit_fail)
os.chmod(post_commit, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC)
> self.assertRaises(errors.HookError, hook.execute)
E AssertionError: HookError not raised by execute
tests/test_hooks.py:169: AssertionError
test_hooks.py::ShellHookTests::test_hook_pre_commit
test_hooks.py::ShellHookTests::test_hook_pre_commit
self =
def test_hook_pre_commit(self):
repo_dir = os.path.join(tempfile.mkdtemp())
os.mkdir(os.path.join(repo_dir, "hooks"))
self.addCleanup(shutil.rmtree, repo_dir)
pre_commit_fail = """#!/bin/sh
exit 1
"""
pre_commit_success = """#!/bin/sh
exit 0
"""
pre_commit_cwd = (
"""#!/bin/sh
if [ "$(pwd)" != '"""
+ repo_dir
+ """' ]; then
echo "Expected path '"""
+ repo_dir
+ """', got '$(pwd)'"
exit 1
fi
exit 0
"""
)
pre_commit = os.path.join(repo_dir, "hooks", "pre-commit")
hook = PreCommitShellHook(repo_dir, repo_dir)
with open(pre_commit, "w") as f:
f.write(pre_commit_fail)
os.chmod(pre_commit, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC)
> self.assertRaises(errors.HookError, hook.execute)
E AssertionError: HookError not raised by execute
tests/test_hooks.py:75: AssertionError
test_lfs.py::LFSTests::test_create
test_lfs.py::LFSTests::test_create
self =
def setUp(self):
super().setUp()
self.test_dir = tempfile.mkdtemp()
self.addCleanup(shutil.rmtree, self.test_dir)
> self.lfs = LFSStore.create(self.test_dir)
E AttributeError: type object 'LFSStore' has no attribute 'create'
tests/test_lfs.py:36: AttributeError
test_lfs.py::LFSTests::test_missing
test_lfs.py::LFSTests::test_missing
self =
def setUp(self):
super().setUp()
self.test_dir = tempfile.mkdtemp()
self.addCleanup(shutil.rmtree, self.test_dir)
> self.lfs = LFSStore.create(self.test_dir)
E AttributeError: type object 'LFSStore' has no attribute 'create'
tests/test_lfs.py:36: AttributeError
test_lru_cache.py::TestLRUCache::test_add__null_key
test_lru_cache.py::TestLRUCache::test_add__null_key
self =
def test_add__null_key(self):
> cache = lru_cache.LRUCache(max_cache=10)
tests/test_lru_cache.py:69:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = , max_cache = 10
after_cleanup_count = None
def __init__(self, max_cache: int=100, after_cleanup_count: Optional[int]=None) -> None:
self._cache: Dict[K, _LRUNode[K, V]] = {}
self._most_recently_used = None
self._least_recently_used = None
> self._update_max_cache(max_cache, after_cleanup_count)
E AttributeError: 'LRUCache' object has no attribute '_update_max_cache'
dulwich/lru_cache.py:38: AttributeError
test_lru_cache.py::TestLRUCache::test_after_cleanup_larger_than_max
test_lru_cache.py::TestLRUCache::test_after_cleanup_larger_than_max
self =
def test_after_cleanup_larger_than_max(self):
> cache = lru_cache.LRUCache(max_cache=5, after_cleanup_count=10)
tests/test_lru_cache.py:186:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = , max_cache = 5
after_cleanup_count = 10
def __init__(self, max_cache: int=100, after_cleanup_count: Optional[int]=None) -> None:
self._cache: Dict[K, _LRUNode[K, V]] = {}
self._most_recently_used = None
self._least_recently_used = None
> self._update_max_cache(max_cache, after_cleanup_count)
E AttributeError: 'LRUCache' object has no attribute '_update_max_cache'
dulwich/lru_cache.py:38: AttributeError
test_lru_cache.py::TestLRUCache::test_after_cleanup_none
test_lru_cache.py::TestLRUCache::test_after_cleanup_none
self =
def test_after_cleanup_none(self):
> cache = lru_cache.LRUCache(max_cache=5, after_cleanup_count=None)
tests/test_lru_cache.py:190:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = , max_cache = 5
after_cleanup_count = None
def __init__(self, max_cache: int=100, after_cleanup_count: Optional[int]=None) -> None:
self._cache: Dict[K, _LRUNode[K, V]] = {}
self._most_recently_used = None
self._least_recently_used = None
> self._update_max_cache(max_cache, after_cleanup_count)
E AttributeError: 'LRUCache' object has no attribute '_update_max_cache'
dulwich/lru_cache.py:38: AttributeError
test_lru_cache.py::TestLRUCache::test_by_usage
test_lru_cache.py::TestLRUCache::test_by_usage
self =
def test_by_usage(self):
"""Accessing entries bumps them up in priority."""
> cache = lru_cache.LRUCache(max_cache=2)
tests/test_lru_cache.py:87:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = , max_cache = 2
after_cleanup_count = None
def __init__(self, max_cache: int=100, after_cleanup_count: Optional[int]=None) -> None:
self._cache: Dict[K, _LRUNode[K, V]] = {}
self._most_recently_used = None
self._least_recently_used = None
> self._update_max_cache(max_cache, after_cleanup_count)
E AttributeError: 'LRUCache' object has no attribute '_update_max_cache'
dulwich/lru_cache.py:38: AttributeError
test_lru_cache.py::TestLRUCache::test_cache_size
test_lru_cache.py::TestLRUCache::test_cache_size
self =
def test_cache_size(self):
> cache = lru_cache.LRUCache(max_cache=10)
tests/test_lru_cache.py:31:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = , max_cache = 10
after_cleanup_count = None
def __init__(self, max_cache: int=100, after_cleanup_count: Optional[int]=None) -> None:
self._cache: Dict[K, _LRUNode[K, V]] = {}
self._most_recently_used = None
self._least_recently_used = None
> self._update_max_cache(max_cache, after_cleanup_count)
E AttributeError: 'LRUCache' object has no attribute '_update_max_cache'
dulwich/lru_cache.py:38: AttributeError
test_lru_cache.py::TestLRUCache::test_cleanup
test_lru_cache.py::TestLRUCache::test_cleanup
self =
def test_cleanup(self):
"""Test that we can use a cleanup function."""
cleanup_called = []
def cleanup_func(key, val):
cleanup_called.append((key, val))
> cache = lru_cache.LRUCache(max_cache=2, after_cleanup_count=2)
tests/test_lru_cache.py:106:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = , max_cache = 2
after_cleanup_count = 2
def __init__(self, max_cache: int=100, after_cleanup_count: Optional[int]=None) -> None:
self._cache: Dict[K, _LRUNode[K, V]] = {}
self._most_recently_used = None
self._least_recently_used = None
> self._update_max_cache(max_cache, after_cleanup_count)
E AttributeError: 'LRUCache' object has no attribute '_update_max_cache'
dulwich/lru_cache.py:38: AttributeError
test_lru_cache.py::TestLRUCache::test_cleanup_2
test_lru_cache.py::TestLRUCache::test_cleanup_2
self =
def test_cleanup_2(self):
> cache = lru_cache.LRUCache(max_cache=5, after_cleanup_count=2)
tests/test_lru_cache.py:195:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = , max_cache = 5
after_cleanup_count = 2
def __init__(self, max_cache: int=100, after_cleanup_count: Optional[int]=None) -> None:
self._cache: Dict[K, _LRUNode[K, V]] = {}
self._most_recently_used = None
self._least_recently_used = None
> self._update_max_cache(max_cache, after_cleanup_count)
E AttributeError: 'LRUCache' object has no attribute '_update_max_cache'
dulwich/lru_cache.py:38: AttributeError
test_lru_cache.py::TestLRUCache::test_cleanup_on_replace
test_lru_cache.py::TestLRUCache::test_cleanup_on_replace
self =
def test_cleanup_on_replace(self):
"""Replacing an object should cleanup the old value."""
cleanup_called = []
def cleanup_func(key, val):
cleanup_called.append((key, val))
> cache = lru_cache.LRUCache(max_cache=2)
tests/test_lru_cache.py:126:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = , max_cache = 2
after_cleanup_count = None
def __init__(self, max_cache: int=100, after_cleanup_count: Optional[int]=None) -> None:
self._cache: Dict[K, _LRUNode[K, V]] = {}
self._most_recently_used = None
self._least_recently_used = None
> self._update_max_cache(max_cache, after_cleanup_count)
E AttributeError: 'LRUCache' object has no attribute '_update_max_cache'
dulwich/lru_cache.py:38: AttributeError
test_lru_cache.py::TestLRUCache::test_cleanup_shrinks_to_after_clean_count
test_lru_cache.py::TestLRUCache::test_cleanup_shrinks_to_after_clean_count
self =
def test_cleanup_shrinks_to_after_clean_count(self):
> cache = lru_cache.LRUCache(max_cache=5, after_cleanup_count=3)
tests/test_lru_cache.py:171:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = , max_cache = 5
after_cleanup_count = 3
def __init__(self, max_cache: int=100, after_cleanup_count: Optional[int]=None) -> None:
self._cache: Dict[K, _LRUNode[K, V]] = {}
self._most_recently_used = None
self._least_recently_used = None
> self._update_max_cache(max_cache, after_cleanup_count)
E AttributeError: 'LRUCache' object has no attribute '_update_max_cache'
dulwich/lru_cache.py:38: AttributeError
test_lru_cache.py::TestLRUCache::test_get
test_lru_cache.py::TestLRUCache::test_get
self =
def test_get(self):
> cache = lru_cache.LRUCache(max_cache=5)
tests/test_lru_cache.py:229:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = , max_cache = 5
after_cleanup_count = None
def __init__(self, max_cache: int=100, after_cleanup_count: Optional[int]=None) -> None:
self._cache: Dict[K, _LRUNode[K, V]] = {}
self._most_recently_used = None
self._least_recently_used = None
> self._update_max_cache(max_cache, after_cleanup_count)
E AttributeError: 'LRUCache' object has no attribute '_update_max_cache'
dulwich/lru_cache.py:38: AttributeError
test_lru_cache.py::TestLRUCache::test_keys
test_lru_cache.py::TestLRUCache::test_keys
self =
def test_keys(self):
> cache = lru_cache.LRUCache(max_cache=5, after_cleanup_count=5)
tests/test_lru_cache.py:242:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = , max_cache = 5
after_cleanup_count = 5
def __init__(self, max_cache: int=100, after_cleanup_count: Optional[int]=None) -> None:
self._cache: Dict[K, _LRUNode[K, V]] = {}
self._most_recently_used = None
self._least_recently_used = None
> self._update_max_cache(max_cache, after_cleanup_count)
E AttributeError: 'LRUCache' object has no attribute '_update_max_cache'
dulwich/lru_cache.py:38: AttributeError
test_lru_cache.py::TestLRUCache::test_len
test_lru_cache.py::TestLRUCache::test_len
self =
def test_len(self):
> cache = lru_cache.LRUCache(max_cache=10, after_cleanup_count=10)
tests/test_lru_cache.py:139:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = , max_cache = 10
after_cleanup_count = 10
def __init__(self, max_cache: int=100, after_cleanup_count: Optional[int]=None) -> None:
self._cache: Dict[K, _LRUNode[K, V]] = {}
self._most_recently_used = None
self._least_recently_used = None
> self._update_max_cache(max_cache, after_cleanup_count)
E AttributeError: 'LRUCache' object has no attribute '_update_max_cache'
dulwich/lru_cache.py:38: AttributeError
test_lru_cache.py::TestLRUCache::test_map_None
test_lru_cache.py::TestLRUCache::test_map_None
self =
def test_map_None(self):
# Make sure that we can properly map None as a key.
> cache = lru_cache.LRUCache(max_cache=10)
tests/test_lru_cache.py:53:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = , max_cache = 10
after_cleanup_count = None
def __init__(self, max_cache: int=100, after_cleanup_count: Optional[int]=None) -> None:
self._cache: Dict[K, _LRUNode[K, V]] = {}
self._most_recently_used = None
self._least_recently_used = None
> self._update_max_cache(max_cache, after_cleanup_count)
E AttributeError: 'LRUCache' object has no attribute '_update_max_cache'
dulwich/lru_cache.py:38: AttributeError
test_lru_cache.py::TestLRUCache::test_missing
test_lru_cache.py::TestLRUCache::test_missing
self =
def test_missing(self):
> cache = lru_cache.LRUCache(max_cache=10)
tests/test_lru_cache.py:41:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = , max_cache = 10
after_cleanup_count = None
def __init__(self, max_cache: int=100, after_cleanup_count: Optional[int]=None) -> None:
self._cache: Dict[K, _LRUNode[K, V]] = {}
self._most_recently_used = None
self._least_recently_used = None
> self._update_max_cache(max_cache, after_cleanup_count)
E AttributeError: 'LRUCache' object has no attribute '_update_max_cache'
dulwich/lru_cache.py:38: AttributeError
test_lru_cache.py::TestLRUCache::test_overflow
test_lru_cache.py::TestLRUCache::test_overflow
self =
def test_overflow(self):
"""Adding extra entries will pop out old ones."""
> cache = lru_cache.LRUCache(max_cache=1, after_cleanup_count=1)
tests/test_lru_cache.py:74:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = , max_cache = 1
after_cleanup_count = 1
def __init__(self, max_cache: int=100, after_cleanup_count: Optional[int]=None) -> None:
self._cache: Dict[K, _LRUNode[K, V]] = {}
self._most_recently_used = None
self._least_recently_used = None
> self._update_max_cache(max_cache, after_cleanup_count)
E AttributeError: 'LRUCache' object has no attribute '_update_max_cache'
dulwich/lru_cache.py:38: AttributeError
test_lru_cache.py::TestLRUCache::test_preserve_last_access_order
test_lru_cache.py::TestLRUCache::test_preserve_last_access_order
self =
def test_preserve_last_access_order(self):
> cache = lru_cache.LRUCache(max_cache=5)
tests/test_lru_cache.py:210:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = , max_cache = 5
after_cleanup_count = None
def __init__(self, max_cache: int=100, after_cleanup_count: Optional[int]=None) -> None:
self._cache: Dict[K, _LRUNode[K, V]] = {}
self._most_recently_used = None
self._least_recently_used = None
> self._update_max_cache(max_cache, after_cleanup_count)
E AttributeError: 'LRUCache' object has no attribute '_update_max_cache'
dulwich/lru_cache.py:38: AttributeError
test_lru_cache.py::TestLRUCache::test_resize_larger
test_lru_cache.py::TestLRUCache::test_resize_larger
self =
def test_resize_larger(self):
> cache = lru_cache.LRUCache(max_cache=5, after_cleanup_count=4)
tests/test_lru_cache.py:273:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = , max_cache = 5
after_cleanup_count = 4
def __init__(self, max_cache: int=100, after_cleanup_count: Optional[int]=None) -> None:
self._cache: Dict[K, _LRUNode[K, V]] = {}
self._most_recently_used = None
self._least_recently_used = None
> self._update_max_cache(max_cache, after_cleanup_count)
E AttributeError: 'LRUCache' object has no attribute '_update_max_cache'
dulwich/lru_cache.py:38: AttributeError
test_lru_cache.py::TestLRUCache::test_resize_smaller
test_lru_cache.py::TestLRUCache::test_resize_smaller
self =
def test_resize_smaller(self):
> cache = lru_cache.LRUCache(max_cache=5, after_cleanup_count=4)
tests/test_lru_cache.py:254:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = , max_cache = 5
after_cleanup_count = 4
def __init__(self, max_cache: int=100, after_cleanup_count: Optional[int]=None) -> None:
self._cache: Dict[K, _LRUNode[K, V]] = {}
self._most_recently_used = None
self._least_recently_used = None
> self._update_max_cache(max_cache, after_cleanup_count)
E AttributeError: 'LRUCache' object has no attribute '_update_max_cache'
dulwich/lru_cache.py:38: AttributeError
test_lru_cache.py::TestLRUSizeCache::test_add__null_key
test_lru_cache.py::TestLRUSizeCache::test_add__null_key
self =
def test_add__null_key(self):
> cache = lru_cache.LRUSizeCache()
tests/test_lru_cache.py:301:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self =
max_size = 1048576, after_cleanup_size = None, compute_size = None
def __init__(self, max_size: int=1024 * 1024, after_cleanup_size: Optional[int]=None, compute_size: Optional[Callable[[V], int]]=None) -> None:
"""Create a new LRUSizeCache.
Args:
max_size: The max number of bytes to store before we start
clearing out entries.
after_cleanup_size: After cleaning up, shrink everything to this
size.
compute_size: A function to compute the size of the values. We
use a function here, so that you can pass 'len' if you are just
using simple strings, or a more complex function if you are using
something like a list of strings, or even a custom object.
The function should take the form "compute_size(value) => integer".
If not supplied, it defaults to 'len()'
"""
self._value_size = 0
if compute_size is None:
self._compute_size = len
else:
self._compute_size = compute_size
> self._update_max_size(max_size, after_cleanup_size=after_cleanup_size)
E AttributeError: 'LRUSizeCache' object has no attribute '_update_max_size'
dulwich/lru_cache.py:168: AttributeError
test_lru_cache.py::TestLRUSizeCache::test_add_tracks_size
test_lru_cache.py::TestLRUSizeCache::test_add_tracks_size
self =
def test_add_tracks_size(self):
> cache = lru_cache.LRUSizeCache()
tests/test_lru_cache.py:305:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self =
max_size = 1048576, after_cleanup_size = None, compute_size = None
def __init__(self, max_size: int=1024 * 1024, after_cleanup_size: Optional[int]=None, compute_size: Optional[Callable[[V], int]]=None) -> None:
"""Create a new LRUSizeCache.
Args:
max_size: The max number of bytes to store before we start
clearing out entries.
after_cleanup_size: After cleaning up, shrink everything to this
size.
compute_size: A function to compute the size of the values. We
use a function here, so that you can pass 'len' if you are just
using simple strings, or a more complex function if you are using
something like a list of strings, or even a custom object.
The function should take the form "compute_size(value) => integer".
If not supplied, it defaults to 'len()'
"""
self._value_size = 0
if compute_size is None:
self._compute_size = len
else:
self._compute_size = compute_size
> self._update_max_size(max_size, after_cleanup_size=after_cleanup_size)
E AttributeError: 'LRUSizeCache' object has no attribute '_update_max_size'
dulwich/lru_cache.py:168: AttributeError
test_lru_cache.py::TestLRUSizeCache::test_adding_clears_cache_based_on_size
test_lru_cache.py::TestLRUSizeCache::test_adding_clears_cache_based_on_size
self =
def test_adding_clears_cache_based_on_size(self):
"""The cache is cleared in LRU order until small enough."""
> cache = lru_cache.LRUSizeCache(max_size=20)
tests/test_lru_cache.py:359:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = , max_size = 20
after_cleanup_size = None, compute_size = None
def __init__(self, max_size: int=1024 * 1024, after_cleanup_size: Optional[int]=None, compute_size: Optional[Callable[[V], int]]=None) -> None:
"""Create a new LRUSizeCache.
Args:
max_size: The max number of bytes to store before we start
clearing out entries.
after_cleanup_size: After cleaning up, shrink everything to this
size.
compute_size: A function to compute the size of the values. We
use a function here, so that you can pass 'len' if you are just
using simple strings, or a more complex function if you are using
something like a list of strings, or even a custom object.
The function should take the form "compute_size(value) => integer".
If not supplied, it defaults to 'len()'
"""
self._value_size = 0
if compute_size is None:
self._compute_size = len
else:
self._compute_size = compute_size
> self._update_max_size(max_size, after_cleanup_size=after_cleanup_size)
E AttributeError: 'LRUSizeCache' object has no attribute '_update_max_size'
dulwich/lru_cache.py:168: AttributeError
test_lru_cache.py::TestLRUSizeCache::test_adding_clears_to_after_cleanup_size
test_lru_cache.py::TestLRUSizeCache::test_adding_clears_to_after_cleanup_size
self =
def test_adding_clears_to_after_cleanup_size(self):
> cache = lru_cache.LRUSizeCache(max_size=20, after_cleanup_size=10)
tests/test_lru_cache.py:371:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = , max_size = 20
after_cleanup_size = 10, compute_size = None
def __init__(self, max_size: int=1024 * 1024, after_cleanup_size: Optional[int]=None, compute_size: Optional[Callable[[V], int]]=None) -> None:
"""Create a new LRUSizeCache.
Args:
max_size: The max number of bytes to store before we start
clearing out entries.
after_cleanup_size: After cleaning up, shrink everything to this
size.
compute_size: A function to compute the size of the values. We
use a function here, so that you can pass 'len' if you are just
using simple strings, or a more complex function if you are using
something like a list of strings, or even a custom object.
The function should take the form "compute_size(value) => integer".
If not supplied, it defaults to 'len()'
"""
self._value_size = 0
if compute_size is None:
self._compute_size = len
else:
self._compute_size = compute_size
> self._update_max_size(max_size, after_cleanup_size=after_cleanup_size)
E AttributeError: 'LRUSizeCache' object has no attribute '_update_max_size'
dulwich/lru_cache.py:168: AttributeError
test_lru_cache.py::TestLRUSizeCache::test_basic_init
test_lru_cache.py::TestLRUSizeCache::test_basic_init
self =
def test_basic_init(self):
> cache = lru_cache.LRUSizeCache()
tests/test_lru_cache.py:295:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self =
max_size = 1048576, after_cleanup_size = None, compute_size = None
def __init__(self, max_size: int=1024 * 1024, after_cleanup_size: Optional[int]=None, compute_size: Optional[Callable[[V], int]]=None) -> None:
"""Create a new LRUSizeCache.
Args:
max_size: The max number of bytes to store before we start
clearing out entries.
after_cleanup_size: After cleaning up, shrink everything to this
size.
compute_size: A function to compute the size of the values. We
use a function here, so that you can pass 'len' if you are just
using simple strings, or a more complex function if you are using
something like a list of strings, or even a custom object.
The function should take the form "compute_size(value) => integer".
If not supplied, it defaults to 'len()'
"""
self._value_size = 0
if compute_size is None:
self._compute_size = len
else:
self._compute_size = compute_size
> self._update_max_size(max_size, after_cleanup_size=after_cleanup_size)
E AttributeError: 'LRUSizeCache' object has no attribute '_update_max_size'
dulwich/lru_cache.py:168: AttributeError
test_lru_cache.py::TestLRUSizeCache::test_cleanup
test_lru_cache.py::TestLRUSizeCache::test_cleanup
self =
def test_cleanup(self):
> cache = lru_cache.LRUSizeCache(max_size=20, after_cleanup_size=10)
tests/test_lru_cache.py:401:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = , max_size = 20
after_cleanup_size = 10, compute_size = None
def __init__(self, max_size: int=1024 * 1024, after_cleanup_size: Optional[int]=None, compute_size: Optional[Callable[[V], int]]=None) -> None:
"""Create a new LRUSizeCache.
Args:
max_size: The max number of bytes to store before we start
clearing out entries.
after_cleanup_size: After cleaning up, shrink everything to this
size.
compute_size: A function to compute the size of the values. We
use a function here, so that you can pass 'len' if you are just
using simple strings, or a more complex function if you are using
something like a list of strings, or even a custom object.
The function should take the form "compute_size(value) => integer".
If not supplied, it defaults to 'len()'
"""
self._value_size = 0
if compute_size is None:
self._compute_size = len
else:
self._compute_size = compute_size
> self._update_max_size(max_size, after_cleanup_size=after_cleanup_size)
E AttributeError: 'LRUSizeCache' object has no attribute '_update_max_size'
dulwich/lru_cache.py:168: AttributeError
test_lru_cache.py::TestLRUSizeCache::test_custom_sizes
test_lru_cache.py::TestLRUSizeCache::test_custom_sizes
self =
def test_custom_sizes(self):
def size_of_list(lst):
return sum(len(x) for x in lst)
> cache = lru_cache.LRUSizeCache(
max_size=20, after_cleanup_size=10, compute_size=size_of_list
)
tests/test_lru_cache.py:386:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = , max_size = 20
after_cleanup_size = 10
compute_size = .size_of_list at 0x7fd400f16830>
def __init__(self, max_size: int=1024 * 1024, after_cleanup_size: Optional[int]=None, compute_size: Optional[Callable[[V], int]]=None) -> None:
"""Create a new LRUSizeCache.
Args:
max_size: The max number of bytes to store before we start
clearing out entries.
after_cleanup_size: After cleaning up, shrink everything to this
size.
compute_size: A function to compute the size of the values. We
use a function here, so that you can pass 'len' if you are just
using simple strings, or a more complex function if you are using
something like a list of strings, or even a custom object.
The function should take the form "compute_size(value) => integer".
If not supplied, it defaults to 'len()'
"""
self._value_size = 0
if compute_size is None:
self._compute_size = len
else:
self._compute_size = compute_size
> self._update_max_size(max_size, after_cleanup_size=after_cleanup_size)
E AttributeError: 'LRUSizeCache' object has no attribute '_update_max_size'
dulwich/lru_cache.py:168: AttributeError
test_lru_cache.py::TestLRUSizeCache::test_keys
test_lru_cache.py::TestLRUSizeCache::test_keys
self =
def test_keys(self):
> cache = lru_cache.LRUSizeCache(max_size=10)
tests/test_lru_cache.py:414:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = , max_size = 10
after_cleanup_size = None, compute_size = None
def __init__(self, max_size: int=1024 * 1024, after_cleanup_size: Optional[int]=None, compute_size: Optional[Callable[[V], int]]=None) -> None:
"""Create a new LRUSizeCache.
Args:
max_size: The max number of bytes to store before we start
clearing out entries.
after_cleanup_size: After cleaning up, shrink everything to this
size.
compute_size: A function to compute the size of the values. We
use a function here, so that you can pass 'len' if you are just
using simple strings, or a more complex function if you are using
something like a list of strings, or even a custom object.
The function should take the form "compute_size(value) => integer".
If not supplied, it defaults to 'len()'
"""
self._value_size = 0
if compute_size is None:
self._compute_size = len
else:
self._compute_size = compute_size
> self._update_max_size(max_size, after_cleanup_size=after_cleanup_size)
E AttributeError: 'LRUSizeCache' object has no attribute '_update_max_size'
dulwich/lru_cache.py:168: AttributeError
test_lru_cache.py::TestLRUSizeCache::test_no_add_over_size
test_lru_cache.py::TestLRUSizeCache::test_no_add_over_size
self =
def test_no_add_over_size(self):
"""Adding a large value may not be cached at all."""
> cache = lru_cache.LRUSizeCache(max_size=10, after_cleanup_size=5)
tests/test_lru_cache.py:321:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = , max_size = 10
after_cleanup_size = 5, compute_size = None
def __init__(self, max_size: int=1024 * 1024, after_cleanup_size: Optional[int]=None, compute_size: Optional[Callable[[V], int]]=None) -> None:
"""Create a new LRUSizeCache.
Args:
max_size: The max number of bytes to store before we start
clearing out entries.
after_cleanup_size: After cleaning up, shrink everything to this
size.
compute_size: A function to compute the size of the values. We
use a function here, so that you can pass 'len' if you are just
using simple strings, or a more complex function if you are using
something like a list of strings, or even a custom object.
The function should take the form "compute_size(value) => integer".
If not supplied, it defaults to 'len()'
"""
self._value_size = 0
if compute_size is None:
self._compute_size = len
else:
self._compute_size = compute_size
> self._update_max_size(max_size, after_cleanup_size=after_cleanup_size)
E AttributeError: 'LRUSizeCache' object has no attribute '_update_max_size'
dulwich/lru_cache.py:168: AttributeError
test_lru_cache.py::TestLRUSizeCache::test_no_add_over_size_cleanup
test_lru_cache.py::TestLRUSizeCache::test_no_add_over_size_cleanup
self =
def test_no_add_over_size_cleanup(self):
"""If a large value is not cached, we will call cleanup right away."""
cleanup_calls = []
def cleanup(key, value):
cleanup_calls.append((key, value))
> cache = lru_cache.LRUSizeCache(max_size=10, after_cleanup_size=5)
tests/test_lru_cache.py:347:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = , max_size = 10
after_cleanup_size = 5, compute_size = None
def __init__(self, max_size: int=1024 * 1024, after_cleanup_size: Optional[int]=None, compute_size: Optional[Callable[[V], int]]=None) -> None:
"""Create a new LRUSizeCache.
Args:
max_size: The max number of bytes to store before we start
clearing out entries.
after_cleanup_size: After cleaning up, shrink everything to this
size.
compute_size: A function to compute the size of the values. We
use a function here, so that you can pass 'len' if you are just
using simple strings, or a more complex function if you are using
something like a list of strings, or even a custom object.
The function should take the form "compute_size(value) => integer".
If not supplied, it defaults to 'len()'
"""
self._value_size = 0
if compute_size is None:
self._compute_size = len
else:
self._compute_size = compute_size
> self._update_max_size(max_size, after_cleanup_size=after_cleanup_size)
E AttributeError: 'LRUSizeCache' object has no attribute '_update_max_size'
dulwich/lru_cache.py:168: AttributeError
test_lru_cache.py::TestLRUSizeCache::test_remove_tracks_size
test_lru_cache.py::TestLRUSizeCache::test_remove_tracks_size
self =
def test_remove_tracks_size(self):
> cache = lru_cache.LRUSizeCache()
tests/test_lru_cache.py:311:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self =
max_size = 1048576, after_cleanup_size = None, compute_size = None
def __init__(self, max_size: int=1024 * 1024, after_cleanup_size: Optional[int]=None, compute_size: Optional[Callable[[V], int]]=None) -> None:
"""Create a new LRUSizeCache.
Args:
max_size: The max number of bytes to store before we start
clearing out entries.
after_cleanup_size: After cleaning up, shrink everything to this
size.
compute_size: A function to compute the size of the values. We
use a function here, so that you can pass 'len' if you are just
using simple strings, or a more complex function if you are using
something like a list of strings, or even a custom object.
The function should take the form "compute_size(value) => integer".
If not supplied, it defaults to 'len()'
"""
self._value_size = 0
if compute_size is None:
self._compute_size = len
else:
self._compute_size = compute_size
> self._update_max_size(max_size, after_cleanup_size=after_cleanup_size)
E AttributeError: 'LRUSizeCache' object has no attribute '_update_max_size'
dulwich/lru_cache.py:168: AttributeError
test_lru_cache.py::TestLRUSizeCache::test_resize_larger
test_lru_cache.py::TestLRUSizeCache::test_resize_larger
self =
def test_resize_larger(self):
> cache = lru_cache.LRUSizeCache(max_size=10, after_cleanup_size=9)
tests/test_lru_cache.py:439:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = , max_size = 10
after_cleanup_size = 9, compute_size = None
def __init__(self, max_size: int=1024 * 1024, after_cleanup_size: Optional[int]=None, compute_size: Optional[Callable[[V], int]]=None) -> None:
"""Create a new LRUSizeCache.
Args:
max_size: The max number of bytes to store before we start
clearing out entries.
after_cleanup_size: After cleaning up, shrink everything to this
size.
compute_size: A function to compute the size of the values. We
use a function here, so that you can pass 'len' if you are just
using simple strings, or a more complex function if you are using
something like a list of strings, or even a custom object.
The function should take the form "compute_size(value) => integer".
If not supplied, it defaults to 'len()'
"""
self._value_size = 0
if compute_size is None:
self._compute_size = len
else:
self._compute_size = compute_size
> self._update_max_size(max_size, after_cleanup_size=after_cleanup_size)
E AttributeError: 'LRUSizeCache' object has no attribute '_update_max_size'
dulwich/lru_cache.py:168: AttributeError
test_lru_cache.py::TestLRUSizeCache::test_resize_smaller
test_lru_cache.py::TestLRUSizeCache::test_resize_smaller
self =
def test_resize_smaller(self):
> cache = lru_cache.LRUSizeCache(max_size=10, after_cleanup_size=9)
tests/test_lru_cache.py:422:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = , max_size = 10
after_cleanup_size = 9, compute_size = None
def __init__(self, max_size: int=1024 * 1024, after_cleanup_size: Optional[int]=None, compute_size: Optional[Callable[[V], int]]=None) -> None:
"""Create a new LRUSizeCache.
Args:
max_size: The max number of bytes to store before we start
clearing out entries.
after_cleanup_size: After cleaning up, shrink everything to this
size.
compute_size: A function to compute the size of the values. We
use a function here, so that you can pass 'len' if you are just
using simple strings, or a more complex function if you are using
something like a list of strings, or even a custom object.
The function should take the form "compute_size(value) => integer".
If not supplied, it defaults to 'len()'
"""
self._value_size = 0
if compute_size is None:
self._compute_size = len
else:
self._compute_size = compute_size
> self._update_max_size(max_size, after_cleanup_size=after_cleanup_size)
E AttributeError: 'LRUSizeCache' object has no attribute '_update_max_size'
dulwich/lru_cache.py:168: AttributeError
test_mailmap.py::ReadMailmapTests::test_read
test_mailmap.py::ReadMailmapTests::test_read
self =
def test_read(self):
b = BytesIO(
b"""\
Jane Doe
Joe R. Developer
# A comment
# Comment
Some Dude nick1
Other Author nick2
Other Author
Santa Claus
"""
)
self.assertEqual(
[
((b"Jane Doe", b"jane@desktop.(none)"), None),
((b"Joe R. Developer", b"joe@example.com"), None),
((None, b"cto@company.xx"), (None, b"cto@coompany.xx")),
(
(b"Some Dude", b"some@dude.xx"),
(b"nick1", b"bugs@company.xx"),
),
(
(b"Other Author", b"other@author.xx"),
(b"nick2", b"bugs@company.xx"),
),
(
(b"Other Author", b"other@author.xx"),
(None, b"nick2@company.xx"),
),
(
(b"Santa Claus", b"santa.claus@northpole.xx"),
(None, b"me@company.xx"),
),
],
> list(read_mailmap(b)),
)
E TypeError: 'NoneType' object is not iterable
tests/test_mailmap.py:65: TypeError
test_mailmap.py::MailmapTests::test_lookup
test_mailmap.py::MailmapTests::test_lookup
self =
def test_lookup(self):
m = Mailmap()
m.add_entry((b"Jane Doe", b"jane@desktop.(none)"), (None, None))
m.add_entry((b"Joe R. Developer", b"joe@example.com"), None)
m.add_entry((None, b"cto@company.xx"), (None, b"cto@coompany.xx"))
m.add_entry((b"Some Dude", b"some@dude.xx"), (b"nick1", b"bugs@company.xx"))
m.add_entry(
(b"Other Author", b"other@author.xx"),
(b"nick2", b"bugs@company.xx"),
)
m.add_entry((b"Other Author", b"other@author.xx"), (None, b"nick2@company.xx"))
m.add_entry(
(b"Santa Claus", b"santa.claus@northpole.xx"),
(None, b"me@company.xx"),
)
> self.assertEqual(
b"Jane Doe ",
m.lookup(b"Jane Doe "),
)
E AssertionError: b'Jane Doe ' != None
tests/test_mailmap.py:85: AssertionError
test_protocol.py::ProtocolTests::test_eof
test_protocol.py::ProtocolTests::test_eof
self =
def test_eof(self):
self.rin.write(b"0000")
self.rin.seek(0)
self.assertFalse(self.proto.eof())
self.assertEqual(None, self.proto.read_pkt_line())
> self.assertTrue(self.proto.eof())
E AssertionError: None is not true
tests/test_protocol.py:62: AssertionError
test_protocol.py::ProtocolTests::test_read_cmd
test_protocol.py::ProtocolTests::test_read_cmd
self =
def test_read_cmd(self):
self.rin.write(b"0012cmd arg1\x00arg2\x00")
self.rin.seek(0)
> self.assertEqual((b"cmd", [b"arg1", b"arg2"]), self.proto.read_cmd())
E AssertionError: (b'cmd', [b'arg1', b'arg2']) != None
tests/test_protocol.py:101: AssertionError
test_protocol.py::ProtocolTests::test_read_cmd_noend0
test_protocol.py::ProtocolTests::test_read_cmd_noend0
self =
def test_read_cmd_noend0(self):
self.rin.write(b"0011cmd arg1\x00arg2")
self.rin.seek(0)
> self.assertRaises(AssertionError, self.proto.read_cmd)
E AssertionError: AssertionError not raised by read_cmd
tests/test_protocol.py:106: AssertionError
test_protocol.py::ProtocolTests::test_read_pkt_line
test_protocol.py::ProtocolTests::test_read_pkt_line
self =
def test_read_pkt_line(self):
self.rin.write(b"0008cmd ")
self.rin.seek(0)
> self.assertEqual(b"cmd ", self.proto.read_pkt_line())
E AssertionError: b'cmd ' != None
tests/test_protocol.py:55: AssertionError
test_protocol.py::ProtocolTests::test_read_pkt_line_wrong_size
test_protocol.py::ProtocolTests::test_read_pkt_line_wrong_size
self =
def test_read_pkt_line_wrong_size(self):
self.rin.write(b"0100too short")
self.rin.seek(0)
> self.assertRaises(GitProtocolError, self.proto.read_pkt_line)
E AssertionError: GitProtocolError not raised by read_pkt_line
tests/test_protocol.py:88: AssertionError
test_protocol.py::ProtocolTests::test_read_pkt_seq
test_protocol.py::ProtocolTests::test_read_pkt_seq
self =
def test_read_pkt_seq(self):
self.rin.write(b"0008cmd 0005l0000")
self.rin.seek(0)
> self.assertEqual([b"cmd ", b"l"], list(self.proto.read_pkt_seq()))
E TypeError: 'NoneType' object is not iterable
tests/test_protocol.py:78: TypeError
test_protocol.py::ProtocolTests::test_send_cmd
test_protocol.py::ProtocolTests::test_send_cmd
self =
def test_send_cmd(self):
self.proto.send_cmd(b"fetch", b"a", b"b")
> self.assertEqual(self.rout.getvalue(), b"000efetch a\x00b\x00")
E AssertionError: b'' != b'000efetch a\x00b\x00'
tests/test_protocol.py:96: AssertionError
test_protocol.py::ProtocolTests::test_unread_pkt_line
test_protocol.py::ProtocolTests::test_unread_pkt_line
self =
def test_unread_pkt_line(self):
self.rin.write(b"0007foo0000")
self.rin.seek(0)
> self.assertEqual(b"foo", self.proto.read_pkt_line())
E AssertionError: b'foo' != None
tests/test_protocol.py:68: AssertionError
test_protocol.py::ProtocolTests::test_write_pkt_line
test_protocol.py::ProtocolTests::test_write_pkt_line
self =
def test_write_pkt_line(self):
self.proto.write_pkt_line(b"bla")
> self.assertEqual(self.rout.getvalue(), b"0007bla")
E AssertionError: b'' != b'0007bla'
tests/test_protocol.py:50: AssertionError
test_protocol.py::ProtocolTests::test_write_pkt_line_none
test_protocol.py::ProtocolTests::test_write_pkt_line_none
self =
def test_write_pkt_line_none(self):
self.proto.write_pkt_line(None)
> self.assertEqual(self.rout.getvalue(), b"0000")
E AssertionError: b'' != b'0000'
tests/test_protocol.py:46: AssertionError
test_protocol.py::ProtocolTests::test_write_sideband
test_protocol.py::ProtocolTests::test_write_sideband
self =
def test_write_sideband(self):
self.proto.write_sideband(3, b"bloe")
> self.assertEqual(self.rout.getvalue(), b"0009\x03bloe")
E AssertionError: b'' != b'0009\x03bloe'
tests/test_protocol.py:92: AssertionError
test_protocol.py::ReceivableProtocolTests::test_eof
test_protocol.py::ReceivableProtocolTests::test_eof
self =
def setUp(self):
TestCase.setUp(self)
self.rout = BytesIO()
self.rin = ReceivableBytesIO()
> self.proto = ReceivableProtocol(self.rin.recv, self.rout.write)
tests/test_protocol.py:140:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self =
recv = >
write =
close = None, report_activity = None, rbufsize = 8192
def __init__(self, recv, write, close=None, report_activity=None, rbufsize=_RBUFSIZE) -> None:
> super().__init__(self.read, write, close=close, report_activity=report_activity)
E AttributeError: 'ReceivableProtocol' object has no attribute 'read'
dulwich/protocol.py:182: AttributeError
test_protocol.py::ReceivableProtocolTests::test_mixed
test_protocol.py::ReceivableProtocolTests::test_mixed
self =
def setUp(self):
TestCase.setUp(self)
self.rout = BytesIO()
self.rin = ReceivableBytesIO()
> self.proto = ReceivableProtocol(self.rin.recv, self.rout.write)
tests/test_protocol.py:140:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self =
recv = >
write =
close = None, report_activity = None, rbufsize = 8192
def __init__(self, recv, write, close=None, report_activity=None, rbufsize=_RBUFSIZE) -> None:
> super().__init__(self.read, write, close=close, report_activity=report_activity)
E AttributeError: 'ReceivableProtocol' object has no attribute 'read'
dulwich/protocol.py:182: AttributeError
test_protocol.py::ReceivableProtocolTests::test_read_cmd
test_protocol.py::ReceivableProtocolTests::test_read_cmd
self =
def setUp(self):
TestCase.setUp(self)
self.rout = BytesIO()
self.rin = ReceivableBytesIO()
> self.proto = ReceivableProtocol(self.rin.recv, self.rout.write)
tests/test_protocol.py:140:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self =
recv = >
write =
close = None, report_activity = None, rbufsize = 8192
def __init__(self, recv, write, close=None, report_activity=None, rbufsize=_RBUFSIZE) -> None:
> super().__init__(self.read, write, close=close, report_activity=report_activity)
E AttributeError: 'ReceivableProtocol' object has no attribute 'read'
dulwich/protocol.py:182: AttributeError
test_protocol.py::ReceivableProtocolTests::test_read_cmd_noend0
test_protocol.py::ReceivableProtocolTests::test_read_cmd_noend0
self =
def setUp(self):
TestCase.setUp(self)
self.rout = BytesIO()
self.rin = ReceivableBytesIO()
> self.proto = ReceivableProtocol(self.rin.recv, self.rout.write)
tests/test_protocol.py:140:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self =
recv = >
write =
close = None, report_activity = None, rbufsize = 8192
def __init__(self, recv, write, close=None, report_activity=None, rbufsize=_RBUFSIZE) -> None:
> super().__init__(self.read, write, close=close, report_activity=report_activity)
E AttributeError: 'ReceivableProtocol' object has no attribute 'read'
dulwich/protocol.py:182: AttributeError
test_protocol.py::ReceivableProtocolTests::test_read_pkt_line
test_protocol.py::ReceivableProtocolTests::test_read_pkt_line
self =
def setUp(self):
TestCase.setUp(self)
self.rout = BytesIO()
self.rin = ReceivableBytesIO()
> self.proto = ReceivableProtocol(self.rin.recv, self.rout.write)
tests/test_protocol.py:140:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self =
recv = >
write =
close = None, report_activity = None, rbufsize = 8192
def __init__(self, recv, write, close=None, report_activity=None, rbufsize=_RBUFSIZE) -> None:
> super().__init__(self.read, write, close=close, report_activity=report_activity)
E AttributeError: 'ReceivableProtocol' object has no attribute 'read'
dulwich/protocol.py:182: AttributeError
test_protocol.py::ReceivableProtocolTests::test_read_pkt_line_none
test_protocol.py::ReceivableProtocolTests::test_read_pkt_line_none
self =
def setUp(self):
TestCase.setUp(self)
self.rout = BytesIO()
self.rin = ReceivableBytesIO()
> self.proto = ReceivableProtocol(self.rin.recv, self.rout.write)
tests/test_protocol.py:140:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self =
recv = >
write =
close = None, report_activity = None, rbufsize = 8192
def __init__(self, recv, write, close=None, report_activity=None, rbufsize=_RBUFSIZE) -> None:
> super().__init__(self.read, write, close=close, report_activity=report_activity)
E AttributeError: 'ReceivableProtocol' object has no attribute 'read'
dulwich/protocol.py:182: AttributeError
test_protocol.py::ReceivableProtocolTests::test_read_pkt_line_wrong_size
test_protocol.py::ReceivableProtocolTests::test_read_pkt_line_wrong_size
self =
def setUp(self):
TestCase.setUp(self)
self.rout = BytesIO()
self.rin = ReceivableBytesIO()
> self.proto = ReceivableProtocol(self.rin.recv, self.rout.write)
tests/test_protocol.py:140:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self =
recv = >
write =
close = None, report_activity = None, rbufsize = 8192
def __init__(self, recv, write, close=None, report_activity=None, rbufsize=_RBUFSIZE) -> None:
> super().__init__(self.read, write, close=close, report_activity=report_activity)
E AttributeError: 'ReceivableProtocol' object has no attribute 'read'
dulwich/protocol.py:182: AttributeError
test_protocol.py::ReceivableProtocolTests::test_read_pkt_seq
test_protocol.py::ReceivableProtocolTests::test_read_pkt_seq
self =
def setUp(self):
TestCase.setUp(self)
self.rout = BytesIO()
self.rin = ReceivableBytesIO()
> self.proto = ReceivableProtocol(self.rin.recv, self.rout.write)
tests/test_protocol.py:140:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self =
recv = >
write =
close = None, report_activity = None, rbufsize = 8192
def __init__(self, recv, write, close=None, report_activity=None, rbufsize=_RBUFSIZE) -> None:
> super().__init__(self.read, write, close=close, report_activity=report_activity)
E AttributeError: 'ReceivableProtocol' object has no attribute 'read'
dulwich/protocol.py:182: AttributeError
test_protocol.py::ReceivableProtocolTests::test_read_recv
test_protocol.py::ReceivableProtocolTests::test_read_recv
self =
def setUp(self):
TestCase.setUp(self)
self.rout = BytesIO()
self.rin = ReceivableBytesIO()
> self.proto = ReceivableProtocol(self.rin.recv, self.rout.write)
tests/test_protocol.py:140:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self =
recv = >
write =
close = None, report_activity = None, rbufsize = 8192
def __init__(self, recv, write, close=None, report_activity=None, rbufsize=_RBUFSIZE) -> None:
> super().__init__(self.read, write, close=close, report_activity=report_activity)
E AttributeError: 'ReceivableProtocol' object has no attribute 'read'
dulwich/protocol.py:182: AttributeError
test_protocol.py::ReceivableProtocolTests::test_recv
test_protocol.py::ReceivableProtocolTests::test_recv
self =
def setUp(self):
TestCase.setUp(self)
self.rout = BytesIO()
self.rin = ReceivableBytesIO()
> self.proto = ReceivableProtocol(self.rin.recv, self.rout.write)
tests/test_protocol.py:140:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self =
recv = >
write =
close = None, report_activity = None, rbufsize = 8192
def __init__(self, recv, write, close=None, report_activity=None, rbufsize=_RBUFSIZE) -> None:
> super().__init__(self.read, write, close=close, report_activity=report_activity)
E AttributeError: 'ReceivableProtocol' object has no attribute 'read'
dulwich/protocol.py:182: AttributeError
test_protocol.py::ReceivableProtocolTests::test_recv_read
test_protocol.py::ReceivableProtocolTests::test_recv_read
self =
def setUp(self):
TestCase.setUp(self)
self.rout = BytesIO()
self.rin = ReceivableBytesIO()
> self.proto = ReceivableProtocol(self.rin.recv, self.rout.write)
tests/test_protocol.py:140:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self =
recv = >
write =
close = None, report_activity = None, rbufsize = 8192
def __init__(self, recv, write, close=None, report_activity=None, rbufsize=_RBUFSIZE) -> None:
> super().__init__(self.read, write, close=close, report_activity=report_activity)
E AttributeError: 'ReceivableProtocol' object has no attribute 'read'
dulwich/protocol.py:182: AttributeError
test_protocol.py::ReceivableProtocolTests::test_send_cmd
test_protocol.py::ReceivableProtocolTests::test_send_cmd
self =
def setUp(self):
TestCase.setUp(self)
self.rout = BytesIO()
self.rin = ReceivableBytesIO()
> self.proto = ReceivableProtocol(self.rin.recv, self.rout.write)
tests/test_protocol.py:140:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self =
recv = >
write =
close = None, report_activity = None, rbufsize = 8192
def __init__(self, recv, write, close=None, report_activity=None, rbufsize=_RBUFSIZE) -> None:
> super().__init__(self.read, write, close=close, report_activity=report_activity)
E AttributeError: 'ReceivableProtocol' object has no attribute 'read'
dulwich/protocol.py:182: AttributeError
test_protocol.py::ReceivableProtocolTests::test_unread_pkt_line
test_protocol.py::ReceivableProtocolTests::test_unread_pkt_line
self =
def setUp(self):
TestCase.setUp(self)
self.rout = BytesIO()
self.rin = ReceivableBytesIO()
> self.proto = ReceivableProtocol(self.rin.recv, self.rout.write)
tests/test_protocol.py:140:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self =
recv = >
write =
close = None, report_activity = None, rbufsize = 8192
def __init__(self, recv, write, close=None, report_activity=None, rbufsize=_RBUFSIZE) -> None:
> super().__init__(self.read, write, close=close, report_activity=report_activity)
E AttributeError: 'ReceivableProtocol' object has no attribute 'read'
dulwich/protocol.py:182: AttributeError
test_protocol.py::ReceivableProtocolTests::test_write_pkt_line
test_protocol.py::ReceivableProtocolTests::test_write_pkt_line
self =
def setUp(self):
TestCase.setUp(self)
self.rout = BytesIO()
self.rin = ReceivableBytesIO()
> self.proto = ReceivableProtocol(self.rin.recv, self.rout.write)
tests/test_protocol.py:140:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self =
recv = >
write =
close = None, report_activity = None, rbufsize = 8192
def __init__(self, recv, write, close=None, report_activity=None, rbufsize=_RBUFSIZE) -> None:
> super().__init__(self.read, write, close=close, report_activity=report_activity)
E AttributeError: 'ReceivableProtocol' object has no attribute 'read'
dulwich/protocol.py:182: AttributeError
test_protocol.py::ReceivableProtocolTests::test_write_pkt_line_none
test_protocol.py::ReceivableProtocolTests::test_write_pkt_line_none
self =
def setUp(self):
TestCase.setUp(self)
self.rout = BytesIO()
self.rin = ReceivableBytesIO()
> self.proto = ReceivableProtocol(self.rin.recv, self.rout.write)
tests/test_protocol.py:140:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self =
recv = >
write =
close = None, report_activity = None, rbufsize = 8192
def __init__(self, recv, write, close=None, report_activity=None, rbufsize=_RBUFSIZE) -> None:
> super().__init__(self.read, write, close=close, report_activity=report_activity)
E AttributeError: 'ReceivableProtocol' object has no attribute 'read'
dulwich/protocol.py:182: AttributeError
test_protocol.py::ReceivableProtocolTests::test_write_sideband
test_protocol.py::ReceivableProtocolTests::test_write_sideband
self =
def setUp(self):
TestCase.setUp(self)
self.rout = BytesIO()
self.rin = ReceivableBytesIO()
> self.proto = ReceivableProtocol(self.rin.recv, self.rout.write)
tests/test_protocol.py:140:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self =
recv = >
write =
close = None, report_activity = None, rbufsize = 8192
def __init__(self, recv, write, close=None, report_activity=None, rbufsize=_RBUFSIZE) -> None:
> super().__init__(self.read, write, close=close, report_activity=report_activity)
E AttributeError: 'ReceivableProtocol' object has no attribute 'read'
dulwich/protocol.py:182: AttributeError
test_protocol.py::CapabilitiesTestCase::test_ack_type
test_protocol.py::CapabilitiesTestCase::test_ack_type
self =
def test_ack_type(self):
> self.assertEqual(SINGLE_ACK, ack_type([b"foo", b"bar"]))
E AssertionError: 0 != None
tests/test_protocol.py:232: AssertionError
test_protocol.py::CapabilitiesTestCase::test_caps
test_protocol.py::CapabilitiesTestCase::test_caps
self =
def test_caps(self):
> self.assertEqual((b"bla", [b"la"]), extract_capabilities(b"bla\0la"))
E AssertionError: (b'bla', [b'la']) != None
tests/test_protocol.py:210: AssertionError
test_protocol.py::CapabilitiesTestCase::test_caps_want_line
test_protocol.py::CapabilitiesTestCase::test_caps_want_line
self =
def test_caps_want_line(self):
> self.assertEqual(
(b"want bla", [b"la"]),
extract_want_line_capabilities(b"want bla la"),
)
E AssertionError: (b'want bla', [b'la']) != None
tests/test_protocol.py:218: AssertionError
test_protocol.py::CapabilitiesTestCase::test_plain
test_protocol.py::CapabilitiesTestCase::test_plain
self =
def test_plain(self):
> self.assertEqual((b"bla", []), extract_capabilities(b"bla"))
E AssertionError: (b'bla', []) != None
tests/test_protocol.py:207: AssertionError
test_protocol.py::CapabilitiesTestCase::test_plain_want_line
test_protocol.py::CapabilitiesTestCase::test_plain_want_line
self =
def test_plain_want_line(self):
> self.assertEqual((b"want bla", []), extract_want_line_capabilities(b"want bla"))
E AssertionError: (b'want bla', []) != None
tests/test_protocol.py:215: AssertionError
test_protocol.py::BufferedPktLineWriterTests::test_write
test_protocol.py::BufferedPktLineWriterTests::test_write
self =
def test_write(self):
self._writer.write(b"foo")
self.assertOutputEquals(b"")
self._writer.flush()
> self.assertOutputEquals(b"0007foo")
tests/test_protocol.py:262:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
tests/test_protocol.py:252: in assertOutputEquals
self.assertEqual(expected, self._output.getvalue())
E AssertionError: b'0007foo' != b''
test_protocol.py::BufferedPktLineWriterTests::test_write_across_boundary
test_protocol.py::BufferedPktLineWriterTests::test_write_across_boundary
self =
def test_write_across_boundary(self):
self._writer.write(b"foo")
self._writer.write(b"barbaz")
> self.assertOutputEquals(b"0007foo000abarba")
tests/test_protocol.py:284:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
tests/test_protocol.py:252: in assertOutputEquals
self.assertEqual(expected, self._output.getvalue())
E AssertionError: b'0007foo000abarba' != b''
test_protocol.py::BufferedPktLineWriterTests::test_write_multiple
test_protocol.py::BufferedPktLineWriterTests::test_write_multiple
self =
def test_write_multiple(self):
self._writer.write(b"foo")
self._writer.write(b"bar")
self.assertOutputEquals(b"")
self._writer.flush()
> self.assertOutputEquals(b"0007foo0007bar")
tests/test_protocol.py:279:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
tests/test_protocol.py:252: in assertOutputEquals
self.assertEqual(expected, self._output.getvalue())
E AssertionError: b'0007foo0007bar' != b''
test_protocol.py::BufferedPktLineWriterTests::test_write_none
test_protocol.py::BufferedPktLineWriterTests::test_write_none
self =
def test_write_none(self):
self._writer.write(None)
self.assertOutputEquals(b"")
self._writer.flush()
> self.assertOutputEquals(b"0000")
tests/test_protocol.py:268:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
tests/test_protocol.py:252: in assertOutputEquals
self.assertEqual(expected, self._output.getvalue())
E AssertionError: b'0000' != b''
test_protocol.py::BufferedPktLineWriterTests::test_write_to_boundary
test_protocol.py::BufferedPktLineWriterTests::test_write_to_boundary
self =
def test_write_to_boundary(self):
self._writer.write(b"foo")
self._writer.write(b"barba")
> self.assertOutputEquals(b"0007foo0009barba")
tests/test_protocol.py:292:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
tests/test_protocol.py:252: in assertOutputEquals
self.assertEqual(expected, self._output.getvalue())
E AssertionError: b'0007foo0009barba' != b''
test_protocol.py::PktLineParserTests::test_multiple_packets
test_protocol.py::PktLineParserTests::test_multiple_packets
self =
def test_multiple_packets(self):
pktlines = []
parser = PktLineParser(pktlines.append)
parser.parse(b"0005z0006aba")
> self.assertEqual(pktlines, [b"z", b"ab"])
E AssertionError: Lists differ: [] != [b'z', b'ab']
E
E Second list contains 2 additional elements.
E First extra element 0:
E b'z'
E
E - []
E + [b'z', b'ab']
tests/test_protocol.py:320: AssertionError
test_protocol.py::PktLineParserTests::test_none
test_protocol.py::PktLineParserTests::test_none
self =
def test_none(self):
pktlines = []
parser = PktLineParser(pktlines.append)
parser.parse(b"0000")
> self.assertEqual(pktlines, [None])
E AssertionError: Lists differ: [] != [None]
E
E Second list contains 1 additional elements.
E First extra element 0:
E None
E
E - []
E + [None]
tests/test_protocol.py:304: AssertionError
test_protocol.py::PktLineParserTests::test_small_fragments
test_protocol.py::PktLineParserTests::test_small_fragments
self =
def test_small_fragments(self):
pktlines = []
parser = PktLineParser(pktlines.append)
parser.parse(b"00")
parser.parse(b"05")
parser.parse(b"z0000")
> self.assertEqual(pktlines, [b"z", None])
E AssertionError: Lists differ: [] != [b'z', None]
E
E Second list contains 2 additional elements.
E First extra element 0:
E b'z'
E
E - []
E + [b'z', None]
tests/test_protocol.py:313: AssertionError
Patch diff
diff --git a/dulwich/objects.py b/dulwich/objects.py
index 2f7197ce..4a03ebf8 100644
--- a/dulwich/objects.py
+++ b/dulwich/objects.py
@@ -56,7 +56,7 @@ def hex_to_filename(path, hex):
def filename_to_hex(filename):
"""Takes an object filename and returns its corresponding hex sha."""
- pass
+ return os.path.basename(os.path.dirname(filename)) + os.path.basename(filename)
def object_header(num_type: int, length: int) -> bytes:
"""Return an object header for the given numeric type and text length."""
@@ -85,7 +85,8 @@ def check_hexsha(hex, error_msg):
Raises:
ObjectFormatException: Raised when the string is not valid
"""
- pass
+ if len(hex) != 40 or not all(c in '0123456789abcdefABCDEF' for c in hex):
+ raise ObjectFormatException(error_msg)
def check_identity(identity: bytes, error_msg: str) -> None:
"""Check if the specified identity is valid.
@@ -96,7 +97,8 @@ def check_identity(identity: bytes, error_msg: str) -> None:
identity: Identity string
error_msg: Error message to use in exception
"""
- pass
+ if b'<' not in identity or b'>' not in identity or b'@' not in identity:
+ raise ObjectFormatException(error_msg)
def check_time(time_seconds):
"""Check if the specified time is not prone to overflow error.
@@ -107,11 +109,12 @@ def check_time(time_seconds):
time_seconds: time in seconds
"""
- pass
+ if time_seconds < 0 or time_seconds > MAX_TIME:
+ raise ObjectFormatException(f"Invalid time {time_seconds}")
def git_line(*items):
"""Formats items into a space separated line."""
- pass
+ return b' '.join(i.encode('utf-8') if isinstance(i, str) else i for i in items)
class FixedSha:
"""SHA object that behaves like hashlib's but is given a fixed value."""
@@ -127,11 +130,11 @@ class FixedSha:
def digest(self) -> bytes:
"""Return the raw SHA digest."""
- pass
+ return self._sha
def hexdigest(self) -> str:
"""Return the hex SHA digest."""
- pass
+ return self._hexsha.decode('ascii')
class ShaFile:
"""A git SHA file."""
@@ -142,84 +145,77 @@ class ShaFile:
_chunked_text: Optional[List[bytes]]
_sha: Union[FixedSha, None, 'HASH']
+ @staticmethod
@staticmethod
def _parse_legacy_object_header(magic, f: BinaryIO) -> 'ShaFile':
"""Parse a legacy object, creating it but not reading the file."""
- pass
+ word = magic + f.read(4)
+ type_name = word.split(b' ')[0]
+ obj_class = object_class(type_name)
+ if not obj_class:
+ raise ObjectFormatException(f"Not a known type: {type_name!r}")
+ return obj_class()
def _parse_legacy_object(self, map) -> None:
"""Parse a legacy object, setting the raw string."""
- pass
+ text = zlib.decompress(map)
+ header_end = text.index(b'\0')
+ self.set_raw_string(text[header_end+1:])
def as_legacy_object_chunks(self, compression_level: int=-1) -> Iterator[bytes]:
"""Return chunks representing the object in the experimental format.
Returns: List of strings
"""
- pass
+ data = self.as_raw_string()
+ header = object_header(self.type_num, len(data))
+ yield zlib.compress(header + data, compression_level)
def as_legacy_object(self, compression_level: int=-1) -> bytes:
"""Return string representing the object in the experimental format."""
-        pass
+        return b''.join(self.as_legacy_object_chunks(compression_level))
def as_raw_chunks(self) -> List[bytes]:
- """Return chunks with serialization of the object.
-
- Returns: List of strings, not necessarily one per line
- """
- pass
+ """Return chunks with serialization of the object."""
+        if self._needs_serialization:
+            self._sha = None
+            self._chunked_text = self._serialize()
+            self._needs_serialization = False
+        return self._chunked_text
     def as_raw_string(self) -> bytes:
         """Return raw string with serialization of the object.
 
         Returns: String object
         """
-        pass
+        return b''.join(self.as_raw_chunks())
 
     def __bytes__(self) -> bytes:
         """Return raw string serialization of this object."""
         return self.as_raw_string()
 
     def __hash__(self):
         """Return unique hash for this object."""
         return hash(self.id)
 
     def as_pretty_string(self) -> str:
         """Return a string representing this object, fit for display."""
-        pass
+        return self.as_raw_string().decode('utf-8', 'replace')
     def set_raw_string(self, text: bytes, sha: Optional[ObjectID]=None) -> None:
         """Set the contents of this object from a serialized string."""
-        pass
+        if not isinstance(text, bytes):
+            raise TypeError(f'Expected bytes for text, got {text!r}')
+        self.set_raw_chunks([text], sha)
 
     def set_raw_chunks(self, chunks: List[bytes], sha: Optional[ObjectID]=None) -> None:
         """Set the contents of this object from a list of chunks."""
-        pass
+        self._chunked_text = chunks
+        self._deserialize(chunks)
+        if sha is None:
+            self._sha = None
+        else:
+            self._sha = FixedSha(sha)
+        self._needs_serialization = False
 
     @staticmethod
     def _parse_object_header(magic, f):
         """Parse a new style object, creating it but not reading the file."""
-        pass
+        num_type = (ord(magic[0:1]) >> 4) & 7
+        obj_class = object_class(num_type)
+        if not obj_class:
+            raise ObjectFormatException(f'Not a known type {num_type}')
+        return obj_class()
 
     def _parse_object(self, map) -> None:
         """Parse a new style object, setting self._text."""
-        pass
+        byte = ord(map[0:1])
+        used = 1
+        while (byte & 0x80) != 0:
+            byte = ord(map[used:used + 1])
+            used += 1
+        self.set_raw_string(zlib.decompress(map[used:]))
 
     def __init__(self) -> None:
         """Don't call this directly."""
         self._sha = None
         self._chunked_text = []
         self._needs_serialization = True
@classmethod
def from_path(cls, path):
"""Open a SHA file from disk."""
- pass
+ with open(path, 'rb') as f:
+ return cls.from_file(f)
@classmethod
def from_file(cls, f):
"""Get the contents of a SHA file on disk."""
-        pass
+        obj = cls()
+        obj._parse_legacy_object(f.read())
+        return obj
@staticmethod
def from_raw_string(type_num, string, sha=None):
@@ -230,7 +226,9 @@ class ShaFile:
string: The raw uncompressed contents.
sha: Optional known sha for the object
"""
- pass
+ obj = object_class(type_num)()
+ obj.set_raw_string(string, sha)
+ return obj
@staticmethod
def from_raw_chunks(type_num: int, chunks: List[bytes], sha: Optional[ObjectID]=None):
@@ -241,7 +239,9 @@ class ShaFile:
chunks: An iterable of the raw uncompressed contents.
sha: Optional known sha for the object
"""
- pass
+ obj = object_class(type_num)()
+ obj.set_raw_chunks(chunks, sha)
+ return obj
@classmethod
def from_string(cls, string):
@@ -258,7 +258,8 @@ class ShaFile:
ObjectFormatException: with the given error_msg if member is
missing or is None
"""
- pass
+ if getattr(self, member, None) is None:
+ raise ObjectFormatException(error_msg)
def check(self) -> None:
"""Check this object for internal consistency.
@@ -268,24 +269,28 @@ class ShaFile:
ChecksumMismatch: if the object was created with a SHA that does
not match its contents
"""
- pass
-
+ # Subclasses should override this method
def raw_length(self) -> int:
"""Returns the length of the raw string of this object."""
- pass
+ return len(self.as_raw_string())
def sha(self):
"""The SHA1 object that is the name of this object."""
-        pass
+        if self._sha is None or self._needs_serialization:
+            new_sha = sha1()
+            new_sha.update(object_header(self.type_num, self.raw_length()))
+            for chunk in self.as_raw_chunks():
+                new_sha.update(chunk)
+            self._sha = new_sha
+        return self._sha
def copy(self):
"""Create a new copy of this SHA1 object from its raw string."""
- pass
+ return type(self).from_raw_string(self.type_num, self.as_raw_string())
@property
def id(self):
"""The hex SHA of this object."""
-        pass
+        return self.sha().hexdigest().encode('ascii')
def __repr__(self) -> str:
return f'<{self.__class__.__name__} {self.id}>'
@@ -330,7 +335,7 @@ class Blob(ShaFile):
Raises:
ObjectFormatException: if the object is malformed in some way
"""
- pass
+ super().check()
def splitlines(self) -> List[bytes]:
"""Return list of lines in this blob.
@@ -371,18 +376,46 @@ class Tag(ShaFile):
Raises:
ObjectFormatException: if the object is malformed in some way
"""
- pass
+ super().check()
+ if not self._name:
+ raise ObjectFormatException("Tag name not set")
+ if not self._object_sha:
+ raise ObjectFormatException("Tagged object SHA not set")
+ if not self._object_class:
+ raise ObjectFormatException("Tagged object type not set")
+ if not self._tagger:
+ raise ObjectFormatException("Tagger not set")
+ if self._tag_time is None:
+ raise ObjectFormatException("Tag time not set")
+ if not self._message:
+ raise ObjectFormatException("Tag message not set")
def _deserialize(self, chunks):
"""Grab the metadata attached to the tag."""
- pass
+ for field, value in _parse_message(chunks):
+ if field == b'object':
+ self._object_sha = value
+ elif field == b'type':
+ self._object_class = object_class(value)
+ elif field == b'tag':
+ self._name = value
+ elif field == b'tagger':
+                (self._tagger, self._tag_time,
+                 (self._tag_timezone, self._tag_timezone_neg_utc)) = parse_time_entry(value)
+            elif field is None:
+                if value is None:
+                    self._message, self._signature = None, None
+                else:
+                    try:
+                        sig_idx = value.index(b'-----BEGIN PGP SIGNATURE-----')
+                    except ValueError:
+                        self._message, self._signature = value, None
+                    else:
+                        self._message = value[:sig_idx]
+                        self._signature = value[sig_idx:]
+            else:
+                raise ObjectFormatException(f'Unknown field {field}')
def _get_object(self):
"""Get the object pointed to by this tag.
Returns: tuple of (object class, sha).
"""
- pass
+ return (self._object_class, self._object_sha)
object = property(_get_object, _set_object)
name = serializable_property('name', 'The name of this tag')
tagger = serializable_property('tagger', 'Returns the name of the person who created this tag')
@@ -405,14 +438,34 @@ class Tag(ShaFile):
gpg.errors.MissingSignatures: if tag was not signed by a key
specified in keyids
"""
- pass
+        import gpg
+        with gpg.Context() as ctx:
+            data, result = ctx.verify(
+                self.as_raw_string()[:-len(self._signature)],
+                signature=self._signature)
+            if keyids:
+                keys = [ctx.get_key(key) for key in keyids]
+                for key in keys:
+                    for subkey in keys:
+                        for sig in result.signatures:
+                            if subkey.can_sign and subkey.fpr == sig.fpr:
+                                return
+                raise gpg.errors.MissingSignatures(result, keys,
+                                                   results=(data, result))
class TreeEntry(namedtuple('TreeEntry', ['path', 'mode', 'sha'])):
"""Named tuple encapsulating a single tree entry."""
def in_path(self, path: bytes):
"""Return a copy of this entry with the given path prepended."""
- pass
+ return TreeEntry(path=path + b'/' + self.path, mode=self.mode, sha=self.sha)
def parse_tree(text, strict=False):
"""Parse a tree text.
@@ -424,7 +477,17 @@ def parse_tree(text, strict=False):
Raises:
ObjectFormatException: if the object was malformed in some way
"""
- pass
+ pos = 0
+ while pos < len(text):
+ mode_end = text.index(b' ', pos)
+ mode = int(text[pos:mode_end], 8)
+ name_end = text.index(b'\0', mode_end)
+ name = text[mode_end+1:name_end]
+ sha = text[name_end+1:name_end+21]
+ if len(sha) != 20:
+ raise ObjectFormatException("SHA has invalid length")
+ yield (name, mode, sha)
+ pos = name_end + 21
def serialize_tree(items):
"""Serialize the items in a tree to a text.
@@ -433,7 +496,8 @@ def serialize_tree(items):
items: Sorted iterable over (name, mode, sha) tuples
Returns: Serialized tree text as chunks
"""
- pass
+    for name, mode, hexsha in items:
+        yield ('%04o' % mode).encode('ascii') + b' ' + name + b'\0' + hex_to_sha(hexsha)
def sorted_tree_items(entries, name_order: bool):
"""Iterate over a tree entries dictionary.
@@ -445,7 +509,10 @@ def sorted_tree_items(entries, name_order: bool):
entries: Dictionary mapping names to (mode, sha) tuples
Returns: Iterator over (name, mode, hexsha)
"""
- pass
+    key_func = key_entry_name_order if name_order else key_entry
+    for name, entry in sorted(entries.items(), key=key_func):
+        mode, hexsha = entry
+        yield TreeEntry(name, int(mode), hexsha)
def key_entry(entry) -> bytes:
"""Sort key for tree entry.
@@ -453,11 +520,14 @@ def key_entry(entry) -> bytes:
Args:
entry: (name, value) tuple
"""
- pass
+ name, value = entry
+ if stat.S_ISDIR(value[0]):
+ name += b'/'
+ return name
def key_entry_name_order(entry):
"""Sort key for tree entry in name order."""
- pass
+ return entry[0]
def pretty_format_tree_entry(name, mode, hexsha, encoding='utf-8') -> str:
"""Pretty format tree entry.
@@ -468,7 +538,18 @@ def pretty_format_tree_entry(name, mode, hexsha, encoding='utf-8') -> str:
hexsha: Hexsha of the referenced object
Returns: string describing the tree entry
"""
- pass
+    if S_ISGITLINK(mode):
+        kind = 'commit'
+    elif stat.S_ISDIR(mode):
+        kind = 'tree'
+    elif stat.S_ISLNK(mode):
+        kind = 'symlink'
+    else:
+        kind = 'blob'
+    if isinstance(hexsha, bytes):
+        hexsha = hexsha.decode('ascii')
+    return '%04o %s %s\t%s\n' % (mode, kind, hexsha, name.decode(encoding))
class SubmoduleEncountered(Exception):
"""A submodule was encountered while resolving a path."""
@@ -525,36 +606,42 @@ class Tree(ShaFile):
name: The name of the entry, as a string.
hexsha: The hex SHA of the entry as a string.
"""
-        pass
+        self._entries[name] = (mode, hexsha)
+        self._needs_serialization = True
+
+    def __contains__(self, name: bytes) -> bool:
+        return name in self._entries
+
+    def __getitem__(self, name: bytes) -> Tuple[int, Union[bytes, str]]:
+        return self._entries[name]
+
+    def __setitem__(self, name: bytes, value: Tuple[int, Union[bytes, str]]) -> None:
+        mode, hexsha = value
+        self.add(name, mode, hexsha)
+
+    def __delitem__(self, name: bytes) -> None:
+        del self._entries[name]
+        self._needs_serialization = True
+
+    def __len__(self) -> int:
+        return len(self._entries)
+
+    def __iter__(self) -> Iterator[bytes]:
+        return iter(self._entries)
 
     def iteritems(self, name_order=False):
         """Iterate over entries.
 
         Args:
           name_order: If True, iterate in name order instead of tree
             order.
         Returns: Iterator over (name, mode, sha) tuples
         """
-        pass
+        return sorted_tree_items(self._entries, name_order)
 
     def items(self):
         """Return the sorted entries in this tree.
 
         Returns: List with (name, mode, sha) tuples
         """
-        pass
+        return list(self.iteritems())
 
     def _deserialize(self, chunks):
         """Grab the entries in the tree."""
-        pass
+        parsed_entries = parse_tree(b''.join(chunks))
+        self._entries = {n: (m, sha_to_hex(s)) for n, m, s in parsed_entries}
 
     def check(self):
         """Check this object for internal consistency.
 
         Raises:
           ObjectFormatException: if the object is malformed in some way
         """
-        pass
+        super().check()
+        list(parse_tree(b''.join(self._chunked_text), strict=True))
def lookup_path(self, lookup_obj, path):
"""Look up an object in a Git tree.
@@ -564,7 +651,23 @@ class Tree(ShaFile):
path: Path to lookup
Returns: A tuple of (mode, SHA) of the resulting path.
"""
- pass
+        parts = path.split(b'/')
+        sha = self.id
+        mode = None
+        for i, p in enumerate(parts):
+            if not p:
+                continue
+            if mode is not None and S_ISGITLINK(mode):
+                raise SubmoduleEncountered(b'/'.join(parts[:i]), sha)
+            obj = lookup_obj(sha)
+            if not isinstance(obj, Tree):
+                raise NotTreeError(sha)
+            mode, sha = obj[p]
+        return mode, sha
def parse_timezone(text):
"""Parse a timezone text fragment (e.g. '+0100').
@@ -585,7 +688,13 @@ def format_timezone(offset, unnecessary_negative_timezone=False):
unnecessary_negative_timezone: Whether to use a minus sign for
UTC or positive timezones (-0000 and --700 rather than +0000 / +0700).
"""
- pass
+ if offset % 60 != 0:
+ raise ValueError("Unable to handle non-minute offset")
+ sign = '+'
+ offset_abs = abs(offset)
+ if offset < 0 or unnecessary_negative_timezone:
+ sign = '-'
+ return f"{sign}{offset_abs // 3600:02d}{(offset_abs // 60) % 60:02d}".encode('ascii')
def parse_time_entry(value):
"""Parse event.
@@ -601,7 +710,8 @@ def parse_time_entry(value):
def format_time_entry(person, time, timezone_info):
"""Format an event."""
- pass
+ timezone_offset, timezone_neg_utc = timezone_info
+ return b'%s %d %s' % (person, time, format_timezone(timezone_offset, timezone_neg_utc))
def parse_commit(chunks):
"""Parse a commit object from chunks.
@@ -611,7 +721,37 @@ def parse_commit(chunks):
Returns: Tuple of (tree, parents, author_info, commit_info,
encoding, mergetag, gpgsig, message, extra)
"""
- pass
+ tree = None
+ parents = []
+ author_info = None
+ commit_info = None
+ encoding = None
+ mergetag = []
+ gpgsig = None
+ extra = []
+ message = None
+
+ for field, value in _parse_message(chunks):
+ if field == b'tree':
+ tree = value
+ elif field == b'parent':
+ parents.append(value)
+ elif field == b'author':
+ author_info = parse_time_entry(value)
+ elif field == b'committer':
+ commit_info = parse_time_entry(value)
+ elif field == b'encoding':
+ encoding = value
+ elif field == b'mergetag':
+ mergetag.append(value)
+ elif field == b'gpgsig':
+ gpgsig = value
+ elif field is None:
+ message = value
+ else:
+ extra.append((field, value))
+
+ return (tree, parents, author_info, commit_info, encoding, mergetag, gpgsig, message, extra)
class Commit(ShaFile):
"""A git commit object."""
@@ -635,37 +775,40 @@ class Commit(ShaFile):
Raises:
ObjectFormatException: if the object is malformed in some way
"""
- pass
 
+    def _deserialize(self, chunks):
+        """Deserialize the data in the commit object."""
+        (self._tree, self._parents, author_info, commit_info, self._encoding,
+         self._mergetag, self._gpgsig, self._message, self._extra) = parse_commit(chunks)
+        (self._author, self._author_time,
+         (self._author_timezone, self._author_timezone_neg_utc)) = author_info
+        (self._committer, self._commit_time,
+         (self._commit_timezone, self._commit_timezone_neg_utc)) = commit_info
 
     def verify(self, keyids: Optional[Iterable[str]]=None):
         """Verify GPG signature for this commit (if it is signed).
 
         Args:
           keyids: Optional iterable of trusted keyids for this commit.
           If this commit is not signed by any key in keyids verification will
           fail. If not specified, this function only verifies that the commit
           has a valid signature.
 
         Raises:
           gpg.errors.BadSignatures: if GPG signature verification fails
           gpg.errors.MissingSignatures: if commit was not signed by a key
           specified in keyids
         """
-        pass
+        import gpg
+        with gpg.Context() as ctx:
+            self_without_gpgsig = self.copy()
+            self_without_gpgsig._gpgsig = None
+            self_without_gpgsig.gpgsig = None
+            data, result = ctx.verify(self_without_gpgsig.as_raw_string(),
+                                      signature=self._gpgsig)
+            if keyids:
+                keys = [ctx.get_key(key) for key in keyids]
+                for key in keys:
+                    for subkey in keys:
+                        for sig in result.signatures:
+                            if subkey.can_sign and subkey.fpr == sig.fpr:
+                                return
+                raise gpg.errors.MissingSignatures(result, keys,
+                                                   results=(data, result))
 
     tree = serializable_property('tree', 'Tree that is the state of this commit')
+ def check(self):
+ """Check this object for internal consistency."""
+ super().check()
+ if not self._tree:
+ raise ObjectFormatException("Missing tree")
+ if not self._author:
+ raise ObjectFormatException("Missing author")
+ if not self._committer:
+ raise ObjectFormatException("Missing committer")
+ for parent in self._parents:
+ check_hexsha(parent, "Invalid parent sha")
+ check_hexsha(self._tree, "Invalid tree sha")
def _get_parents(self):
"""Return a list of parents of this commit."""
- pass
+ return self._parents
def _set_parents(self, value):
"""Set a list of parents of this commit."""
-        pass
+        self._parents = value
+        self._needs_serialization = True
 
     parents = property(_get_parents, _set_parents, doc='Parents of this commit, by their SHA1.')
def _get_extra(self):
"""Return extra settings of this commit."""
- pass
+ return self._extra
extra = property(_get_extra, doc='Extra header fields not understood (presumably added in a newer version of git). Kept verbatim so the object can be correctly reserialized. For private commit metadata, use pseudo-headers in Commit.message, rather than this field.')
author = serializable_property('author', 'The name of the author of the commit')
committer = serializable_property('committer', 'The name of the committer of the commit')
@@ -687,4 +830,4 @@ _sorted_tree_items_py = sorted_tree_items
try:
from dulwich._objects import parse_tree, sorted_tree_items
except ImportError:
- pass
\ No newline at end of file
+ pass