From 8bb9c9e84d913b086f88e6006a2ef9d60d1c086d Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Jelmer=20Vernoo=C4=B3?=
Date: Mon, 18 Nov 2024 14:54:15 +0000
Subject: [PATCH] Add more types, courtesy of ruff

---
 Makefile | 2 +-
 dulwich/bundle.py | 2 +-
 dulwich/cli.py | 156 +++++-----
 dulwich/client.py | 48 +--
 dulwich/cloud/gcs.py | 4 +-
 dulwich/contrib/diffstat.py | 2 +-
 dulwich/contrib/paramiko_vendor.py | 2 +-
 dulwich/contrib/swift.py | 41 +--
 dulwich/diff_tree.py | 25 +-
 dulwich/fastexport.py | 16 +-
 dulwich/file.py | 8 +-
 dulwich/graph.py | 16 +-
 dulwich/greenthreads.py | 4 +-
 dulwich/hooks.py | 3 +-
 dulwich/index.py | 18 +-
 dulwich/log_utils.py | 6 +-
 dulwich/lru_cache.py | 4 +-
 dulwich/mailmap.py | 4 +-
 dulwich/object_store.py | 70 ++---
 dulwich/objects.py | 60 ++--
 dulwich/pack.py | 28 +-
 dulwich/patch.py | 10 +-
 dulwich/porcelain.py | 102 +++----
 dulwich/protocol.py | 21 +-
 dulwich/reflog.py | 2 +-
 dulwich/refs.py | 60 ++--
 dulwich/repo.py | 44 +--
 dulwich/server.py | 92 +++---
 dulwich/stash.py | 2 +-
 dulwich/tests/test_object_store.py | 78 +++--
 dulwich/tests/utils.py | 10 +-
 dulwich/walk.py | 11 +-
 dulwich/web.py | 24 +-
 fuzzing/fuzz-targets/fuzz_bundle.py | 5 +-
 fuzzing/fuzz-targets/fuzz_configfile.py | 5 +-
 fuzzing/fuzz-targets/fuzz_object_store.py | 5 +-
 fuzzing/fuzz-targets/fuzz_repo.py | 5 +-
 fuzzing/fuzz-targets/test_utils.py | 4 +-
 tests/__init__.py | 12 +-
 tests/compat/server_utils.py | 34 +--
 tests/compat/test_client.py | 85 +++---
 tests/compat/test_pack.py | 11 +-
 tests/compat/test_patch.py | 4 +-
 tests/compat/test_porcelain.py | 4 +-
 tests/compat/test_repository.py | 34 +--
 tests/compat/test_server.py | 8 +-
 tests/compat/test_utils.py | 18 +-
 tests/compat/test_web.py | 29 +-
 tests/compat/utils.py | 16 +-
 tests/contrib/test_paramiko_vendor.py | 17 +-
 tests/contrib/test_release_robot.py | 8 +-
 tests/contrib/test_swift.py | 58 ++--
 tests/contrib/test_swift_smoke.py | 30 +-
 tests/test_archive.py | 12 +-
 tests/test_blackbox.py | 10 +-
 tests/test_bundle.py | 2 +-
 tests/test_client.py | 279 ++++++++---------
 tests/test_config.py | 126 ++++----
 tests/test_credentials.py | 6 +-
 tests/test_diff_tree.py | 136 ++++-----
 tests/test_fastexport.py | 30 +-
 tests/test_file.py | 32 +-
 tests/test_grafts.py | 40 +--
 tests/test_graph.py | 24 +-
 tests/test_greenthreads.py | 4 +-
 tests/test_hooks.py | 8 +-
 tests/test_ignore.py | 30 +-
 tests/test_index.py | 94 +++---
 tests/test_lfs.py | 6 +-
 tests/test_line_ending.py | 36 +--
 tests/test_lru_cache.py | 68 ++---
 tests/test_mailmap.py | 4 +-
 tests/test_missing_obj_finder.py | 42 +--
 tests/test_object_store.py | 78 ++---
 tests/test_objects.py | 190 ++++++------
 tests/test_objectspec.py | 64 ++--
 tests/test_pack.py | 217 ++++++-------
 tests/test_patch.py | 43 +--
 tests/test_porcelain.py | 356 +++++++++++-----------
 tests/test_protocol.py | 78 ++---
 tests/test_reflog.py | 12 +-
 tests/test_refs.py | 116 +++----
 tests/test_repository.py | 200 ++++++------
 tests/test_server.py | 158 +++++-----
 tests/test_stash.py | 2 +-
 tests/test_utils.py | 14 +-
 tests/test_walk.py | 76 ++---
 tests/test_web.py | 105 +++----
 88 files changed, 2067 insertions(+), 1998 deletions(-)

diff --git a/Makefile b/Makefile
index 3a4f1b22f..4114a98a8 100644
--- a/Makefile
+++ b/Makefile
@@ -45,7 +45,7 @@ check-contrib:: clean
 check-all: check check-pypy check-noextensions
 
 typing:
-	mypy dulwich
+	$(PYTHON) -m mypy dulwich
 
 clean::
 	$(SETUP) clean --all
diff --git a/dulwich/bundle.py b/dulwich/bundle.py
index aaec676fe..18f10eb08 100644
--- a/dulwich/bundle.py
+++ b/dulwich/bundle.py
@@
-103,7 +103,7 @@ def read_bundle(f): raise AssertionError(f"unsupported bundle format header: {firstline!r}") -def write_bundle(f, bundle): +def write_bundle(f, bundle) -> None: version = bundle.version if version is None: if bundle.capabilities: diff --git a/dulwich/cli.py b/dulwich/cli.py index eb7ed4362..da0dabf4f 100755 --- a/dulwich/cli.py +++ b/dulwich/cli.py @@ -33,7 +33,7 @@ import signal import sys from getopt import getopt -from typing import ClassVar, Optional +from typing import ClassVar, Optional, TYPE_CHECKING from dulwich import porcelain @@ -44,12 +44,16 @@ from .pack import Pack, sha_to_hex from .repo import Repo +if TYPE_CHECKING: + from .objects import ObjectID + from .refs import Ref -def signal_int(signal, frame): + +def signal_int(signal, frame) -> None: sys.exit(1) -def signal_quit(signal, frame): +def signal_quit(signal, frame) -> None: import pdb pdb.set_trace() @@ -58,13 +62,13 @@ def signal_quit(signal, frame): class Command: """A Dulwich subcommand.""" - def run(self, args): + def run(self, args) -> Optional[int]: """Run the command.""" raise NotImplementedError(self.run) class cmd_archive(Command): - def run(self, args): + def run(self, args) -> None: parser = argparse.ArgumentParser() parser.add_argument( "--remote", @@ -88,7 +92,7 @@ def run(self, args): class cmd_add(Command): - def run(self, argv): + def run(self, argv) -> None: parser = argparse.ArgumentParser() args = parser.parse_args(argv) @@ -96,7 +100,7 @@ def run(self, argv): class cmd_rm(Command): - def run(self, argv): + def run(self, argv) -> None: parser = argparse.ArgumentParser() args = parser.parse_args(argv) @@ -104,10 +108,11 @@ def run(self, argv): class cmd_fetch_pack(Command): - def run(self, argv): + def run(self, argv) -> None: parser = argparse.ArgumentParser() parser.add_argument("--all", action="store_true") parser.add_argument("location", nargs="?", type=str) + parser.add_argument("refs", nargs="*", type=str) args = parser.parse_args(argv) client, path = get_transport_and_path(args.location) r = Repo(".") @@ -115,26 +120,28 @@ def run(self, argv): determine_wants = r.object_store.determine_wants_all else: - def determine_wants(x, **kwargs): - return [y for y in args if y not in r.object_store] + def determine_wants(refs: dict[Ref, ObjectID], depth: Optional[int] = None) -> list[ObjectID]: + return [y.encode('utf-8') for y in args.refs if y not in r.object_store] client.fetch(path, r, determine_wants) class cmd_fetch(Command): - def run(self, args): + def run(self, args) -> None: opts, args = getopt(args, "", []) - opts = dict(opts) + kwopts = dict(opts) client, path = get_transport_and_path(args.pop(0)) r = Repo(".") - refs = client.fetch(path, r, progress=sys.stdout.write) + def progress(msg: bytes) -> None: + sys.stdout.buffer.write(msg) + refs = client.fetch(path, r, progress=progress) print("Remote refs:") for item in refs.items(): print("{} -> {}".format(*item)) class cmd_for_each_ref(Command): - def run(self, args): + def run(self, args) -> None: parser = argparse.ArgumentParser() parser.add_argument("pattern", type=str, nargs="?") args = parser.parse_args(args) @@ -143,15 +150,15 @@ def run(self, args): class cmd_fsck(Command): - def run(self, args): + def run(self, args) -> None: opts, args = getopt(args, "", []) - opts = dict(opts) + kwopts = dict(opts) for obj, msg in porcelain.fsck("."): print(f"{obj}: {msg}") class cmd_log(Command): - def run(self, args): + def run(self, args) -> None: parser = optparse.OptionParser() parser.add_option( "--reverse", @@ -177,7 +184,7 @@ 
def run(self, args): class cmd_diff(Command): - def run(self, args): + def run(self, args) -> None: opts, args = getopt(args, "", []) r = Repo(".") @@ -193,7 +200,7 @@ def run(self, args): class cmd_dump_pack(Command): - def run(self, args): + def run(self, args) -> None: opts, args = getopt(args, "", []) if args == []: @@ -204,8 +211,7 @@ def run(self, args): x = Pack(basename) print(f"Object names checksum: {x.name()}") print(f"Checksum: {sha_to_hex(x.get_stored_checksum())}") - if not x.check(): - print("CHECKSUM DOES NOT MATCH") + x.check() print("Length: %d" % len(x)) for name in x: try: @@ -217,7 +223,7 @@ def run(self, args): class cmd_dump_index(Command): - def run(self, args): + def run(self, args) -> None: opts, args = getopt(args, "", []) if args == []: @@ -232,20 +238,20 @@ def run(self, args): class cmd_init(Command): - def run(self, args): + def run(self, args) -> None: opts, args = getopt(args, "", ["bare"]) - opts = dict(opts) + kwopts = dict(opts) if args == []: path = os.getcwd() else: path = args[0] - porcelain.init(path, bare=("--bare" in opts)) + porcelain.init(path, bare=("--bare" in kwopts)) class cmd_clone(Command): - def run(self, args): + def run(self, args) -> None: parser = optparse.OptionParser() parser.add_option( "--bare", @@ -307,29 +313,29 @@ def run(self, args): class cmd_commit(Command): - def run(self, args): + def run(self, args) -> None: opts, args = getopt(args, "", ["message"]) - opts = dict(opts) - porcelain.commit(".", message=opts["--message"]) + kwopts = dict(opts) + porcelain.commit(".", message=kwopts["--message"]) class cmd_commit_tree(Command): - def run(self, args): + def run(self, args) -> None: opts, args = getopt(args, "", ["message"]) if args == []: print("usage: dulwich commit-tree tree") sys.exit(1) - opts = dict(opts) - porcelain.commit_tree(".", tree=args[0], message=opts["--message"]) + kwopts = dict(opts) + porcelain.commit_tree(".", tree=args[0], message=kwopts["--message"]) class cmd_update_server_info(Command): - def run(self, args): + def run(self, args) -> None: porcelain.update_server_info(".") class cmd_symbolic_ref(Command): - def run(self, args): + def run(self, args) -> None: opts, args = getopt(args, "", ["ref-name", "force"]) if not args: print("Usage: dulwich symbolic-ref REF_NAME [--force]") @@ -340,7 +346,7 @@ def run(self, args): class cmd_pack_refs(Command): - def run(self, argv): + def run(self, argv) -> None: parser = argparse.ArgumentParser() parser.add_argument("--all", action="store_true") # ignored, we never prune @@ -352,7 +358,7 @@ def run(self, argv): class cmd_show(Command): - def run(self, argv): + def run(self, argv) -> None: parser = argparse.ArgumentParser() parser.add_argument("objectish", type=str, nargs="*") args = parser.parse_args(argv) @@ -360,7 +366,7 @@ def run(self, argv): class cmd_diff_tree(Command): - def run(self, args): + def run(self, args) -> None: opts, args = getopt(args, "", []) if len(args) < 2: print("Usage: dulwich diff-tree OLD-TREE NEW-TREE") @@ -369,7 +375,7 @@ def run(self, args): class cmd_rev_list(Command): - def run(self, args): + def run(self, args) -> None: opts, args = getopt(args, "", []) if len(args) < 1: print("Usage: dulwich rev-list COMMITID...") @@ -378,7 +384,7 @@ def run(self, args): class cmd_tag(Command): - def run(self, args): + def run(self, args) -> None: parser = optparse.OptionParser() parser.add_option( "-a", @@ -396,28 +402,28 @@ def run(self, args): class cmd_repack(Command): - def run(self, args): + def run(self, args) -> None: opts, args = 
getopt(args, "", []) - opts = dict(opts) + kwopts = dict(opts) porcelain.repack(".") class cmd_reset(Command): - def run(self, args): + def run(self, args) -> None: opts, args = getopt(args, "", ["hard", "soft", "mixed"]) - opts = dict(opts) + kwopts = dict(opts) mode = "" - if "--hard" in opts: + if "--hard" in kwopts: mode = "hard" - elif "--soft" in opts: + elif "--soft" in kwopts: mode = "soft" - elif "--mixed" in opts: + elif "--mixed" in kwopts: mode = "mixed" - porcelain.reset(".", mode=mode, *args) + porcelain.reset(".", mode=mode) class cmd_daemon(Command): - def run(self, args): + def run(self, args) -> None: from dulwich import log_utils from .protocol import TCP_GIT_PORT @@ -450,7 +456,7 @@ def run(self, args): class cmd_web_daemon(Command): - def run(self, args): + def run(self, args) -> None: from dulwich import log_utils parser = optparse.OptionParser() @@ -481,14 +487,14 @@ def run(self, args): class cmd_write_tree(Command): - def run(self, args): + def run(self, args) -> None: parser = optparse.OptionParser() options, args = parser.parse_args(args) sys.stdout.write("{}\n".format(porcelain.write_tree("."))) class cmd_receive_pack(Command): - def run(self, args): + def run(self, args) -> None: parser = optparse.OptionParser() options, args = parser.parse_args(args) if len(args) >= 1: @@ -499,7 +505,7 @@ def run(self, args): class cmd_upload_pack(Command): - def run(self, args): + def run(self, args) -> None: parser = optparse.OptionParser() options, args = parser.parse_args(args) if len(args) >= 1: @@ -510,7 +516,7 @@ def run(self, args): class cmd_status(Command): - def run(self, args): + def run(self, args) -> None: parser = optparse.OptionParser() options, args = parser.parse_args(args) if len(args) >= 1: @@ -539,7 +545,7 @@ def run(self, args): class cmd_ls_remote(Command): - def run(self, args): + def run(self, args) -> None: opts, args = getopt(args, "", []) if len(args) < 1: print("Usage: dulwich ls-remote URL") @@ -550,7 +556,7 @@ def run(self, args): class cmd_ls_tree(Command): - def run(self, args): + def run(self, args) -> None: parser = optparse.OptionParser() parser.add_option( "-r", @@ -574,20 +580,20 @@ def run(self, args): class cmd_pack_objects(Command): - def run(self, args): + def run(self, args) -> None: deltify = False reuse_deltas = True opts, args = getopt(args, "", ["stdout", "deltify", "no-reuse-deltas"]) - opts = dict(opts) - if len(args) < 1 and "--stdout" not in opts.keys(): + kwopts = dict(opts) + if len(args) < 1 and "--stdout" not in kwopts.keys(): print("Usage: dulwich pack-objects basename") sys.exit(1) object_ids = [line.strip() for line in sys.stdin.readlines()] - if "--deltify" in opts.keys(): + if "--deltify" in kwopts.keys(): deltify = True - if "--no-reuse-deltas" in opts.keys(): + if "--no-reuse-deltas" in kwopts.keys(): reuse_deltas = False - if "--stdout" in opts.keys(): + if "--stdout" in kwopts.keys(): packf = getattr(sys.stdout, "buffer", sys.stdout) idxf = None close = [] @@ -604,7 +610,7 @@ def run(self, args): class cmd_pull(Command): - def run(self, args): + def run(self, args) -> None: parser = argparse.ArgumentParser() parser.add_argument("from_location", type=str) parser.add_argument("refspec", type=str, nargs="*") @@ -621,7 +627,7 @@ def run(self, args): class cmd_push(Command): - def run(self, argv): + def run(self, argv) -> Optional[int]: parser = argparse.ArgumentParser() parser.add_argument("-f", "--force", action="store_true", help="Force") parser.add_argument("to_location", type=str) @@ -635,9 +641,11 @@ def 
run(self, argv): sys.stderr.write("Diverged branches; specify --force to override") return 1 + return None + class cmd_remote_add(Command): - def run(self, args): + def run(self, args) -> None: parser = optparse.OptionParser() options, args = parser.parse_args(args) porcelain.remote_add(".", args[0], args[1]) @@ -669,7 +677,7 @@ class cmd_remote(SuperCommand): class cmd_submodule_list(Command): - def run(self, argv): + def run(self, argv) -> None: parser = argparse.ArgumentParser() parser.parse_args(argv) for path, sha in porcelain.submodule_list("."): @@ -677,7 +685,7 @@ def run(self, argv): class cmd_submodule_init(Command): - def run(self, argv): + def run(self, argv) -> None: parser = argparse.ArgumentParser() parser.parse_args(argv) porcelain.submodule_init(".") @@ -703,7 +711,7 @@ def run(self, args): class cmd_check_mailmap(Command): - def run(self, args): + def run(self, args) -> None: parser = optparse.OptionParser() options, args = parser.parse_args(args) for arg in args: @@ -712,7 +720,7 @@ def run(self, args): class cmd_stash_list(Command): - def run(self, args): + def run(self, args) -> None: parser = optparse.OptionParser() options, args = parser.parse_args(args) for i, entry in porcelain.stash_list("."): @@ -720,7 +728,7 @@ def run(self, args): class cmd_stash_push(Command): - def run(self, args): + def run(self, args) -> None: parser = optparse.OptionParser() options, args = parser.parse_args(args) porcelain.stash_push(".") @@ -728,7 +736,7 @@ def run(self, args): class cmd_stash_pop(Command): - def run(self, args): + def run(self, args) -> None: parser = optparse.OptionParser() options, args = parser.parse_args(args) porcelain.stash_pop(".") @@ -744,7 +752,7 @@ class cmd_stash(SuperCommand): class cmd_ls_files(Command): - def run(self, args): + def run(self, args) -> None: parser = optparse.OptionParser() options, args = parser.parse_args(args) for name in porcelain.ls_files("."): @@ -752,14 +760,14 @@ def run(self, args): class cmd_describe(Command): - def run(self, args): + def run(self, args) -> None: parser = optparse.OptionParser() options, args = parser.parse_args(args) print(porcelain.describe(".")) class cmd_help(Command): - def run(self, args): + def run(self, args) -> None: parser = optparse.OptionParser() parser.add_option( "-a", @@ -850,7 +858,7 @@ def main(argv=None): return cmd_kls().run(argv[1:]) -def _main(): +def _main() -> None: if "DULWICH_PDB" in os.environ and getattr(signal, "SIGQUIT", None): signal.signal(signal.SIGQUIT, signal_quit) # type: ignore signal.signal(signal.SIGINT, signal_int) diff --git a/dulwich/client.py b/dulwich/client.py index cbb8dffa6..1c9b77df7 100644 --- a/dulwich/client.py +++ b/dulwich/client.py @@ -239,7 +239,7 @@ def check(self): else: raise GitProtocolError(f"invalid ref status {status!r}") - def handle_packet(self, pkt): + def handle_packet(self, pkt) -> None: """Handle a packet. 
Raises: @@ -258,7 +258,7 @@ def handle_packet(self, pkt): self._ref_statuses.append(ref_status) -def negotiate_protocol_version(proto): +def negotiate_protocol_version(proto) -> int: pkt = proto.read_pkt_line() if pkt is not None and pkt.strip() == b"version 2": return 2 @@ -353,7 +353,7 @@ def __init__( self.new_shallow = new_shallow self.new_unshallow = new_unshallow - def _warn_deprecated(self): + def _warn_deprecated(self) -> None: import warnings warnings.warn( @@ -430,7 +430,7 @@ def __init__(self, refs, agent=None, ref_status=None) -> None: self.agent = agent self.ref_status = ref_status - def _warn_deprecated(self): + def _warn_deprecated(self) -> None: import warnings warnings.warn( @@ -663,7 +663,7 @@ def _handle_upload_pack_tail( progress: Optional[Callable[[bytes], None]] = None, rbufsize=_RBUFSIZE, protocol_version=0, -): +) -> None: """Handle the tail of a 'git-upload-pack' request. Args: @@ -696,7 +696,7 @@ def _handle_upload_pack_tail( if progress is None: # Just ignore progress data - def progress(x): + def progress(x) -> None: pass for chan, data in _read_side_band64k_data(proto.read_pkt_seq()): @@ -771,7 +771,7 @@ def __init__( self._fetch_capabilities.add(CAPABILITY_INCLUDE_TAG) self.protocol_version = 0 # will be overridden later - def get_url(self, path): + def get_url(self, path) -> str: """Retrieves full url to given path. Args: @@ -784,7 +784,7 @@ def get_url(self, path): raise NotImplementedError(self.get_url) @classmethod - def from_parsedurl(cls, parsedurl, **kwargs): + def from_parsedurl(cls, parsedurl, **kwargs) -> "GitClient": """Create an instance of this client from a urlparse.parsed object. Args: @@ -803,7 +803,7 @@ def send_pack( [set[bytes], set[bytes], bool], tuple[int, Iterator[UnpackedObject]] ], progress=None, - ): + ) -> SendPackResult: """Upload a pack to a remote repository. Args: @@ -969,13 +969,13 @@ def fetch( dir=getattr(target.object_store, "path", None), ) - def commit(): + def commit() -> None: if f.tell(): f.seek(0) - target.object_store.add_thin_pack(f.read, None, progress=progress) + target.object_store.add_thin_pack(f.read, None, progress=progress) # type: ignore f.close() - def abort(): + def abort() -> None: f.close() else: @@ -1012,7 +1012,7 @@ def fetch_pack( ref_prefix: Optional[list[Ref]] = None, filter_spec=None, protocol_version: Optional[int] = None, - ): + ) -> FetchPackResult: """Retrieve a pack from a git smart server. Args: @@ -1044,7 +1044,7 @@ def get_refs( path, protocol_version: Optional[int] = None, ref_prefix: Optional[list[Ref]] = None, - ): + ) -> dict[Ref, ObjectID]: """Retrieve the current refs from a git smart server. Args: @@ -1085,7 +1085,7 @@ def _handle_receive_pack_tail( if CAPABILITY_SIDE_BAND_64K in capabilities or self.protocol_version == 2: if progress is None: - def progress(x): + def progress(x) -> None: pass if CAPABILITY_REPORT_STATUS in capabilities: @@ -1147,12 +1147,12 @@ def archive( format=None, subdirs=None, prefix=None, - ): + ) -> None: """Retrieve an archive of the specified tree.""" raise NotImplementedError(self.archive) @staticmethod - def _warn_filter_objects(): + def _warn_filter_objects() -> None: import warnings warnings.warn( @@ -1161,7 +1161,7 @@ def _warn_filter_objects(): ) -def check_wants(wants, refs): +def check_wants(wants, refs) -> None: """Check that a set of wants is valid. 
Args: @@ -1516,7 +1516,7 @@ def archive( format=None, subdirs=None, prefix=None, - ): + ) -> None: proto, can_read, stderr = self._connect(b"upload-archive", path) with proto: if format is not None: @@ -1607,7 +1607,7 @@ def _connect( # 0 means unbuffered wfile = s.makefile("wb", 0) - def close(): + def close() -> None: rfile.close() wfile.close() s.close() @@ -1664,7 +1664,7 @@ def can_read(self): else: return _fileno_can_read(self.proc.stdout.fileno()) - def close(self): + def close(self) -> None: self.proc.stdin.close() self.proc.stdout.close() if self.proc.stderr: @@ -1784,7 +1784,7 @@ def send_pack(self, path, update_refs, generate_pack_data, progress=None): """ if not progress: - def progress(x): + def progress(x) -> None: pass with self._open_repo(path) as target: @@ -1948,7 +1948,7 @@ def run_command( key_filename=None, ssh_command=None, protocol_version: Optional[int] = None, - ): + ): """Connect to an SSH server. Run a command remotely and return a file-like object for interaction @@ -2313,7 +2313,7 @@ def default_urllib3_manager( return manager -def check_for_proxy_bypass(base_url): +def check_for_proxy_bypass(base_url) -> bool: # Check if a proxy bypass is defined with the no_proxy environment variable if base_url: # only check if base_url is provided no_proxy_str = os.environ.get("no_proxy") diff --git a/dulwich/cloud/gcs.py b/dulwich/cloud/gcs.py index b9dd9b4ca..aa4d0ffbd 100644 --- a/dulwich/cloud/gcs.py +++ b/dulwich/cloud/gcs.py @@ -39,7 +39,7 @@ def __init__(self, bucket, subpath="") -> None: def __repr__(self) -> str: return f"{type(self).__name__}({self.bucket!r}, subpath={self.subpath!r})" - def _remove_pack(self, name): + def _remove_pack(self, name) -> None: self.bucket.delete_blobs( [posixpath.join(self.subpath, name) + "." + ext for ext in ["pack", "idx"]] ) @@ -72,7 +72,7 @@ def _get_pack(self, name): lambda: self._load_pack_data(name), lambda: self._load_pack_index(name) ) - def _upload_pack(self, basename, pack_file, index_file): + def _upload_pack(self, basename, pack_file, index_file) -> None: idxblob = self.bucket.blob(posixpath.join(self.subpath, basename + ".idx")) datablob = self.bucket.blob(posixpath.join(self.subpath, basename + ".pack")) idxblob.upload_from_file(index_file) diff --git a/dulwich/contrib/diffstat.py b/dulwich/contrib/diffstat.py index 8ce844ee4..e24e22b0a 100755 --- a/dulwich/contrib/diffstat.py +++ b/dulwich/contrib/diffstat.py @@ -188,7 +188,7 @@ def diffstat(lines, max_width=80): return output -def main(): +def main() -> int: argv = sys.argv # allow diffstat.py to also be used from the command line if len(sys.argv) > 1: diff --git a/dulwich/contrib/paramiko_vendor.py b/dulwich/contrib/paramiko_vendor.py index 4d6da7f46..c622e6197 100644 --- a/dulwich/contrib/paramiko_vendor.py +++ b/dulwich/contrib/paramiko_vendor.py @@ -66,7 +66,7 @@ def read(self, n=None): return data + self.read(diff_len) return data - def close(self): + def close(self) -> None: self.channel.close() diff --git a/dulwich/contrib/swift.py b/dulwich/contrib/swift.py index 1fa48a9c0..87e4fb8e1 100644 --- a/dulwich/contrib/swift.py +++ b/dulwich/contrib/swift.py @@ -36,6 +36,7 @@ import zlib from configparser import ConfigParser from io import BytesIO +from typing import Optional from geventhttpclient import HTTPClient @@ -302,7 +303,7 @@ def swift_auth_v2(self): ) return endpoint[self.endpoint_type], token - def test_root_exists(self): + def test_root_exists(self) -> Optional[bool]: """Check that Swift container exist. 
Returns: True if exist or None it not @@ -316,7 +317,7 @@ def test_root_exists(self): ) return True - def create_root(self): + def create_root(self) -> None: """Create the Swift container. Raises: @@ -368,7 +369,7 @@ def get_object_stat(self, name): resp_headers[header.lower()] = value return resp_headers - def put_object(self, name, content): + def put_object(self, name, content) -> None: """Put an object. Args: @@ -425,7 +426,7 @@ def get_object(self, name, range=None): return content return BytesIO(content) - def del_object(self, name): + def del_object(self, name) -> None: """Delete an object. Args: @@ -440,7 +441,7 @@ def del_object(self, name): f"DELETE request failed with error code {ret.status_code}" ) - def del_root(self): + def del_root(self) -> None: """Delete the root container by removing container content. Raises: @@ -481,7 +482,7 @@ def __init__(self, scon, filename, pack_length) -> None: self.buff = b"" self.buff_length = self.scon.chunk_length - def _read(self, more=False): + def _read(self, more=False) -> None: if more: self.buff_length = self.buff_length * 2 offset = self.base_offset @@ -510,7 +511,7 @@ def read(self, length): self.offset = end return data - def seek(self, offset): + def seek(self, offset) -> None: """Seek to a specified offset. Args: @@ -568,7 +569,7 @@ def get_stored_checksum(self): pack_reader = SwiftPackReader(self.scon, self._filename, self.pack_length) return pack_reader.read_checksum() - def close(self): + def close(self) -> None: pass @@ -695,22 +696,22 @@ def commit(): else: return None - def abort(): + def abort() -> None: pass return f, commit, abort - def add_object(self, obj): + def add_object(self, obj) -> None: self.add_objects( [ (obj, None), ] ) - def _pack_cache_stale(self): + def _pack_cache_stale(self) -> bool: return False - def _get_loose_object(self, sha): + def _get_loose_object(self, sha) -> None: return None def add_thin_pack(self, read_all, read_some): @@ -815,12 +816,12 @@ def _load_check_ref(self, name, old_ref): return False return refs - def _write_refs(self, refs): + def _write_refs(self, refs) -> None: f = BytesIO() f.writelines(write_info_refs(refs, self.store)) self.scon.put_object(self.filename, f) - def set_if_equals(self, name, old_ref, new_ref): + def set_if_equals(self, name, old_ref, new_ref, committer=None, timestamp=None, timezone=None, message=None) -> bool: """Set a refname to new_ref only if it currently equals old_ref.""" if name == "HEAD": return True @@ -832,7 +833,7 @@ def set_if_equals(self, name, old_ref, new_ref): self._refs[name] = new_ref return True - def remove_if_equals(self, name, old_ref): + def remove_if_equals(self, name, old_ref, committer=None, timestamp=None, timezone=None, message=None) -> bool: """Remove a refname only if it currently equals old_ref.""" if name == "HEAD": return True @@ -879,14 +880,14 @@ def __init__(self, root, conf) -> None: refs = SwiftInfoRefsContainer(self.scon, object_store) BaseRepo.__init__(self, object_store, refs) - def _determine_file_mode(self): + def _determine_file_mode(self) -> bool: """Probe the file-system to determine whether permissions can be trusted. Returns: True if permissions can be trusted, False otherwise. """ return False - def _put_named_file(self, filename, contents): + def _put_named_file(self, filename, contents) -> None: """Put an object in a Swift container. 
Args: @@ -928,7 +929,7 @@ def open_repository(self, path): return SwiftRepo(path, self.conf) -def cmd_daemon(args): +def cmd_daemon(args) -> None: """Entry point for starting a TCP git server.""" import optparse @@ -980,7 +981,7 @@ def cmd_daemon(args): server.serve_forever() -def cmd_init(args): +def cmd_init(args) -> None: import optparse parser = optparse.OptionParser() @@ -1001,7 +1002,7 @@ def cmd_init(args): SwiftRepo.init_bare(scon, conf) -def main(argv=sys.argv): +def main(argv=sys.argv) -> None: commands = { "init": cmd_init, "daemon": cmd_daemon, diff --git a/dulwich/diff_tree.py b/dulwich/diff_tree.py index a50e1a96a..9bf986893 100644 --- a/dulwich/diff_tree.py +++ b/dulwich/diff_tree.py @@ -230,7 +230,7 @@ def tree_changes( yield TreeChange(change_type, entry1, entry2) -def _all_eq(seq, key, value): +def _all_eq(seq, key, value) -> bool: for e in seq: if key(e) != value: return False @@ -411,6 +411,11 @@ def _tree_change_key(entry): class RenameDetector: """Object for handling rename detection between two trees.""" + _adds: list[TreeChange] + _deletes: list[TreeChange] + _changes: list[TreeChange] + _candidates: list[tuple[int, TreeChange]] + def __init__( self, store, @@ -443,7 +448,7 @@ def __init__( self._find_copies_harder = find_copies_harder self._want_unchanged = False - def _reset(self): + def _reset(self) -> None: self._adds = [] self._deletes = [] self._changes = [] @@ -459,7 +464,7 @@ def _should_split(self, change): new_obj = self._store[change.new.sha] return _similarity_score(old_obj, new_obj) < self._rewrite_threshold - def _add_change(self, change): + def _add_change(self, change) -> None: if change.type == CHANGE_ADD: self._adds.append(change) elif change.type == CHANGE_DELETE: @@ -478,7 +483,7 @@ def _add_change(self, change): else: self._changes.append(change) - def _collect_changes(self, tree1_id, tree2_id): + def _collect_changes(self, tree1_id, tree2_id) -> None: want_unchanged = self._find_copies_harder or self._want_unchanged for change in tree_changes( self._store, @@ -489,11 +494,11 @@ def _collect_changes(self, tree1_id, tree2_id): ): self._add_change(change) - def _prune(self, add_paths, delete_paths): + def _prune(self, add_paths, delete_paths) -> None: self._adds = [a for a in self._adds if a.new.path not in add_paths] self._deletes = [d for d in self._deletes if d.old.path not in delete_paths] - def _find_exact_renames(self): + def _find_exact_renames(self) -> None: add_map = defaultdict(list) for add in self._adds: add_map[add.new.sha].append(add.new) @@ -541,7 +546,7 @@ def _rename_type(self, check_paths, delete, add): return CHANGE_COPY return CHANGE_RENAME - def _find_content_rename_candidates(self): + def _find_content_rename_candidates(self) -> None: candidates = self._candidates = [] # TODO: Optimizations: # - Compare object sizes before counting blocks. @@ -570,7 +575,7 @@ def _find_content_rename_candidates(self): rename = TreeChange(new_type, delete.old, add.new) candidates.append((-score, rename)) - def _choose_content_renames(self): + def _choose_content_renames(self) -> None: # Sort scores from highest to lowest, but keep names in ascending # order. 
self._candidates.sort() @@ -594,7 +599,7 @@ def _choose_content_renames(self): self._changes.append(change) self._prune(add_paths, delete_paths) - def _join_modifies(self): + def _join_modifies(self) -> None: if self._rewrite_threshold is None: return @@ -620,7 +625,7 @@ def _sorted_changes(self): result.sort(key=_tree_change_key) return result - def _prune_unchanged(self): + def _prune_unchanged(self) -> None: if self._want_unchanged: return self._deletes = [d for d in self._deletes if d.type != CHANGE_UNCHANGED] diff --git a/dulwich/fastexport.py b/dulwich/fastexport.py index 207feef33..6e591953c 100644 --- a/dulwich/fastexport.py +++ b/dulwich/fastexport.py @@ -45,7 +45,7 @@ def __init__(self, outf, store) -> None: self.markers: dict[bytes, bytes] = {} self._marker_idx = 0 - def print_cmd(self, cmd): + def print_cmd(self, cmd) -> None: self.outf.write(getattr(cmd, "__bytes__", cmd.__repr__)() + b"\n") def _allocate_marker(self): @@ -138,17 +138,17 @@ def import_stream(self, stream): self.process(p.iter_commands) return self.markers - def blob_handler(self, cmd): + def blob_handler(self, cmd) -> None: """Process a BlobCommand.""" blob = Blob.from_string(cmd.data) self.repo.object_store.add_object(blob) if cmd.mark: self.markers[cmd.mark] = blob.id - def checkpoint_handler(self, cmd): + def checkpoint_handler(self, cmd) -> None: """Process a CheckpointCommand.""" - def commit_handler(self, cmd): + def commit_handler(self, cmd) -> None: """Process a CommitCommand.""" commit = Commit() if cmd.author is not None: @@ -207,10 +207,10 @@ def commit_handler(self, cmd): if cmd.mark: self.markers[cmd.mark] = commit.id - def progress_handler(self, cmd): + def progress_handler(self, cmd) -> None: """Process a ProgressCommand.""" - def _reset_base(self, commit_id): + def _reset_base(self, commit_id) -> None: if self.last_commit == commit_id: return self._contents = {} @@ -224,7 +224,7 @@ def _reset_base(self, commit_id): ) in iter_tree_contents(self.repo.object_store, tree_id): self._contents[path] = (mode, hexsha) - def reset_handler(self, cmd): + def reset_handler(self, cmd) -> None: """Process a ResetCommand.""" if cmd.from_ is None: from_ = ZERO_SHA @@ -233,7 +233,7 @@ def reset_handler(self, cmd): self._reset_base(from_) self.repo.refs[cmd.ref] = from_ - def tag_handler(self, cmd): + def tag_handler(self, cmd) -> None: """Process a TagCommand.""" tag = Tag() tag.tagger = cmd.tagger diff --git a/dulwich/file.py b/dulwich/file.py index 9366df1df..9930918c7 100644 --- a/dulwich/file.py +++ b/dulwich/file.py @@ -26,7 +26,7 @@ from typing import ClassVar -def ensure_dir_exists(dirname): +def ensure_dir_exists(dirname) -> None: """Ensure a directory exists, creating if necessary.""" try: os.makedirs(dirname) @@ -34,7 +34,7 @@ def ensure_dir_exists(dirname): pass -def _fancy_rename(oldname, newname): +def _fancy_rename(oldname, newname) -> None: """Rename file with temporary backup file to rollback if rename fails.""" if not os.path.exists(newname): try: @@ -159,7 +159,7 @@ def __init__(self, filename, mode, bufsize, mask) -> None: for method in self.PROXY_METHODS: setattr(self, method, getattr(self._file, method)) - def abort(self): + def abort(self) -> None: """Close and discard the lockfile without overwriting the target. If the file is already closed, this is a no-op. @@ -174,7 +174,7 @@ def abort(self): # The file may have been removed already, which is ok. self._closed = True - def close(self): + def close(self) -> None: """Close this file, saving the lockfile over the original. 
Note: If this method fails, it will attempt to delete the lockfile. diff --git a/dulwich/graph.py b/dulwich/graph.py index 9c18dca64..94aba2077 100644 --- a/dulwich/graph.py +++ b/dulwich/graph.py @@ -20,29 +20,31 @@ """Implementation of merge-base following the approach of git.""" from heapq import heappop, heappush +from typing import Generic, Optional, Iterator, TypeVar from .lru_cache import LRUCache +T = TypeVar("T") # priority queue using builtin python minheap tools # why they do not have a builtin maxheap is simply ridiculous but # liveable with integer time stamps using negation -class WorkList: - def __init__(self): - self.pq = [] +class WorkList(Generic[T]): + def __init__(self) -> None: + self.pq: list[tuple[int, T]] = [] - def add(self, item): + def add(self, item: tuple[int, T]) -> None: dt, cmt = item heappush(self.pq, (-dt, cmt)) - def get(self): + def get(self) -> Optional[tuple[int, T]]: item = heappop(self.pq) if item: pr, cmt = item return -pr, cmt return None - def iter(self): + def iter(self) -> Iterator[tuple[int, T]]: for pr, cmt in self.pq: yield (-pr, cmt) @@ -57,7 +59,7 @@ def _find_lcas(lookup_parents, c1, c2s, lookup_stamp, min_stamp=0): _DNC = 4 # Do Not Consider _LCA = 8 # potential LCA (Lowest Common Ancestor) - def _has_candidates(wlst, cstates): + def _has_candidates(wlst, cstates) -> bool: for dt, cmt in wlst.iter(): if cmt in cstates: if not ((cstates[cmt] & _DNC) == _DNC): diff --git a/dulwich/greenthreads.py b/dulwich/greenthreads.py index fd3ed7f19..314e5f271 100644 --- a/dulwich/greenthreads.py +++ b/dulwich/greenthreads.py @@ -44,7 +44,7 @@ def _split_commits_and_tags(obj_store, lst, *, ignore_unknown=False, pool=None): commits = set() tags = set() - def find_commit_type(sha): + def find_commit_type(sha) -> None: try: o = obj_store[sha] except KeyError: @@ -81,7 +81,7 @@ def __init__( concurrency=1, get_parents=None, ) -> None: - def collect_tree_sha(sha): + def collect_tree_sha(sha) -> None: self.sha_done.add(sha) cmt = object_store[sha] _collect_filetree_revs(object_store, cmt.tree, self.sha_done) diff --git a/dulwich/hooks.py b/dulwich/hooks.py index b1f56fc57..740c67167 100644 --- a/dulwich/hooks.py +++ b/dulwich/hooks.py @@ -22,6 +22,7 @@ import os import subprocess +from typing import Any from .errors import HookError @@ -29,7 +30,7 @@ class Hook: """Generic hook object.""" - def execute(self, *args): + def execute(self, *args) -> Any: """Execute the hook with the given args. Args: diff --git a/dulwich/index.py b/dulwich/index.py index be9d279ce..a0ba4b037 100644 --- a/dulwich/index.py +++ b/dulwich/index.py @@ -192,7 +192,7 @@ def read_cache_time(f): return struct.unpack(">LL", f.read(8)) -def write_cache_time(f, t): +def write_cache_time(f, t) -> None: """Write a cache time. Args: @@ -337,7 +337,7 @@ def read_index_dict(f) -> dict[bytes, Union[IndexEntry, ConflictedIndexEntry]]: def write_index( f: BinaryIO, entries: list[SerializedIndexEntry], version: Optional[int] = None -): +) -> None: """Write an index file. 
Args: @@ -440,7 +440,7 @@ def write(self) -> None: finally: f.close() - def read(self): + def read(self) -> None: """Read current contents of index from disk.""" if not os.path.exists(self._filename): return @@ -470,7 +470,7 @@ def __iter__(self) -> Iterator[bytes]: """Iterate over the paths and stages in this index.""" return iter(self._byname) - def __contains__(self, key): + def __contains__(self, key) -> bool: return key in self._byname def get_sha1(self, path: bytes) -> bytes: @@ -501,7 +501,7 @@ def has_conflicts(self) -> bool: return True return False - def clear(self): + def clear(self) -> None: """Remove all contents from this index.""" self._byname = {} @@ -522,7 +522,7 @@ def iteritems( def items(self) -> Iterator[tuple[bytes, Union[IndexEntry, ConflictedIndexEntry]]]: return iter(self._byname.items()) - def update(self, entries: dict[bytes, Union[IndexEntry, ConflictedIndexEntry]]): + def update(self, entries: dict[bytes, Union[IndexEntry, ConflictedIndexEntry]]) -> None: for key, value in entries.items(): self[key] = value @@ -808,7 +808,7 @@ def build_index_from_tree( honor_filemode: bool = True, validate_path_element=validate_path_element_default, symlink_fn=None, -): +) -> None: """Generate and materialize index from a tree. Args: @@ -935,7 +935,7 @@ def read_submodule_head(path: Union[str, bytes]) -> Optional[bytes]: return None -def _has_directory_changed(tree_path: bytes, entry): +def _has_directory_changed(tree_path: bytes, entry) -> bool: """Check if a directory has changed after getting an error. When handling an error trying to create a blob from a path, call this @@ -1123,7 +1123,7 @@ def iter_fresh_objects( yield path, entry.sha, cleanup_mode(entry.mode) -def refresh_index(index: Index, root_path: bytes): +def refresh_index(index: Index, root_path: bytes) -> None: """Refresh the contents of an index. This is the equivalent to running 'git commit -a'. diff --git a/dulwich/log_utils.py b/dulwich/log_utils.py index 556332dfc..27201963e 100644 --- a/dulwich/log_utils.py +++ b/dulwich/log_utils.py @@ -44,7 +44,7 @@ class _NullHandler(logging.Handler): """No-op logging handler to avoid unexpected logging warnings.""" - def emit(self, record): + def emit(self, record) -> None: pass @@ -53,7 +53,7 @@ def emit(self, record): _DULWICH_LOGGER.addHandler(_NULL_HANDLER) -def default_logging_config(): +def default_logging_config() -> None: """Set up the default Dulwich loggers.""" remove_null_handler() logging.basicConfig( @@ -63,7 +63,7 @@ def default_logging_config(): ) -def remove_null_handler(): +def remove_null_handler() -> None: """Remove the null handler from the Dulwich loggers. If a caller wants to set up logging using something other than diff --git a/dulwich/lru_cache.py b/dulwich/lru_cache.py index 225edb4c9..6c2c55b07 100644 --- a/dulwich/lru_cache.py +++ b/dulwich/lru_cache.py @@ -214,7 +214,7 @@ def items(self) -> dict[K, V]: """Get the key:value pairs as a dict.""" return {k: n.value for k, n in self._cache.items()} - def cleanup(self): + def cleanup(self) -> None: """Clear the cache until it shrinks to the requested size. 
This does not completely wipe the cache, just makes sure it is under @@ -291,7 +291,7 @@ def resize(self, max_cache: int, after_cleanup_count: Optional[int] = None) -> N """Change the number of entries that will be cached.""" self._update_max_cache(max_cache, after_cleanup_count=after_cleanup_count) - def _update_max_cache(self, max_cache, after_cleanup_count=None): + def _update_max_cache(self, max_cache, after_cleanup_count=None) -> None: self._max_cache = max_cache if after_cleanup_count is None: self._after_cleanup_count = self._max_cache * 8 / 10 diff --git a/dulwich/mailmap.py b/dulwich/mailmap.py index 49f05c20f..951e52476 100644 --- a/dulwich/mailmap.py +++ b/dulwich/mailmap.py @@ -64,12 +64,12 @@ class Mailmap: """Class for accessing a mailmap file.""" def __init__(self, map=None) -> None: - self._table: dict[tuple[Optional[str], str], tuple[str, str]] = {} + self._table: dict[tuple[Optional[str], Optional[str]], tuple[str, str]] = {} if map: for canonical_identity, from_identity in map: self.add_entry(canonical_identity, from_identity) - def add_entry(self, canonical_identity, from_identity=None): + def add_entry(self, canonical_identity, from_identity=None) -> None: """Add an entry to the mail mail. Any of the fields can be None, but at least one of them needs to be diff --git a/dulwich/object_store.py b/dulwich/object_store.py index 03ce91aee..6d427e020 100644 --- a/dulwich/object_store.py +++ b/dulwich/object_store.py @@ -114,7 +114,7 @@ def _want_deepen(sha): and not sha == ZERO_SHA ] - def contains_loose(self, sha): + def contains_loose(self, sha) -> bool: """Check if a particular object is present by SHA1 and is loose.""" raise NotImplementedError(self.contains_loose) @@ -130,7 +130,7 @@ def packs(self): """Iterable of pack objects.""" raise NotImplementedError - def get_raw(self, name): + def get_raw(self, name) -> tuple[int, bytes]: """Obtain the raw text for an object. Args: @@ -148,11 +148,11 @@ def __iter__(self): """Iterate over the SHAs that are present in this store.""" raise NotImplementedError(self.__iter__) - def add_object(self, obj): + def add_object(self, obj) -> None: """Add a single object to this object store.""" raise NotImplementedError(self.add_object) - def add_objects(self, objects, progress=None): + def add_objects(self, objects, progress=None) -> None: """Add a set of objects to this object store. Args: @@ -348,7 +348,7 @@ def _get_depth( ) return current_depth - def close(self): + def close(self) -> None: """Close any files opened by this object store.""" # Default implementation is a NO-OP @@ -404,7 +404,7 @@ def add_pack_data( def alternates(self): return [] - def contains_packed(self, sha): + def contains_packed(self, sha) -> bool: """Check if a particular object is present by SHA1 and is packed. This does not check alternates. 
@@ -429,7 +429,7 @@ def __contains__(self, sha) -> bool: return True return False - def _add_cached_pack(self, base_name, pack): + def _add_cached_pack(self, base_name, pack) -> None: """Add a newly appeared pack to the cache by path.""" prev_pack = self._pack_cache.get(base_name) if prev_pack is not pack: @@ -462,7 +462,7 @@ def generate_pack_data( other_haves=remote_has, ) - def _clear_cached_packs(self): + def _clear_cached_packs(self) -> None: pack_cache = self._pack_cache self._pack_cache = {} while pack_cache: @@ -472,10 +472,10 @@ def _clear_cached_packs(self): def _iter_cached_packs(self): return self._pack_cache.values() - def _update_pack_cache(self): + def _update_pack_cache(self) -> list[Pack]: raise NotImplementedError(self._update_pack_cache) - def close(self): + def close(self) -> None: self._clear_cached_packs() @property @@ -492,13 +492,13 @@ def _iter_loose_objects(self): """Iterate over the SHAs of all loose objects.""" raise NotImplementedError(self._iter_loose_objects) - def _get_loose_object(self, sha): + def _get_loose_object(self, sha) -> Optional[ShaFile]: raise NotImplementedError(self._get_loose_object) - def _remove_loose_object(self, sha): + def _remove_loose_object(self, sha) -> None: raise NotImplementedError(self._remove_loose_object) - def _remove_pack(self, name): + def _remove_pack(self, name) -> None: raise NotImplementedError(self._remove_pack) def pack_loose_objects(self): @@ -793,7 +793,7 @@ def _read_alternate_paths(self): else: yield os.fsdecode(os.path.join(os.fsencode(self.path), line)) - def add_alternate_path(self, path): + def add_alternate_path(self, path) -> None: """Add an alternate path to this object store.""" try: os.mkdir(os.path.join(self.path, INFODIR)) @@ -864,10 +864,10 @@ def _get_loose_object(self, sha): except FileNotFoundError: return None - def _remove_loose_object(self, sha): + def _remove_loose_object(self, sha) -> None: os.remove(self._get_shafile_path(sha)) - def _remove_pack(self, pack): + def _remove_pack(self, pack) -> None: try: del self._pack_cache[os.path.basename(pack._basename)] except KeyError: @@ -997,13 +997,13 @@ def commit(): os.remove(path) return None - def abort(): + def abort() -> None: f.close() os.remove(path) return f, commit, abort - def add_object(self, obj): + def add_object(self, obj) -> None: """Add a single object to this object store. Args: @@ -1087,7 +1087,7 @@ def contains_loose(self, sha): """Check if a particular object is present by SHA1 and is loose.""" return self._to_hexsha(sha) in self._data - def contains_packed(self, sha): + def contains_packed(self, sha) -> bool: """Check if a particular object is present by SHA1 and is packed.""" return False @@ -1117,11 +1117,11 @@ def __delitem__(self, name: ObjectID) -> None: """Delete an object from this store, for testing only.""" del self._data[self._to_hexsha(name)] - def add_object(self, obj): + def add_object(self, obj) -> None: """Add a single object to this object store.""" self._data[obj.id] = obj.copy() - def add_objects(self, objects, progress=None): + def add_objects(self, objects, progress=None) -> None: """Add a set of objects to this object store. 
Args: @@ -1143,7 +1143,7 @@ def add_pack(self): f = SpooledTemporaryFile(max_size=PACK_SPOOL_FILE_MAX_SIZE, prefix="incoming-") - def commit(): + def commit() -> None: size = f.tell() if size > 0: f.seek(0) @@ -1154,7 +1154,7 @@ def commit(): else: f.close() - def abort(): + def abort() -> None: f.close() return f, commit, abort @@ -1171,7 +1171,7 @@ def add_pack_data( for unpacked_object in unpacked_objects: self.add_object(unpacked_object.sha_file()) - def add_thin_pack(self, read_all, read_some, progress=None): + def add_thin_pack(self, read_all, read_some, progress=None) -> None: """Add a new thin pack to this object store. Thin packs are packs that contain deltas with parents that exist @@ -1372,7 +1372,7 @@ def get_remote_has(self): def add_todo( self, entries: Iterable[tuple[ObjectID, Optional[bytes], Optional[int], bool]] - ): + ) -> None: self.objects_to_send.update([e for e in entries if e[0] not in self.sha_done]) def __next__(self) -> tuple[bytes, Optional[PackHint]]: @@ -1445,10 +1445,10 @@ def __init__(self, local_heads, get_parents, shallow=None) -> None: shallow = set() self.shallow = shallow - def nak(self): + def nak(self) -> None: """Nothing in common was found.""" - def ack(self, sha): + def ack(self, sha) -> None: """Ack that a revision and its ancestors are present in the source.""" if len(sha) != 40: raise ValueError(f"unexpected sha {sha!r} received") @@ -1610,13 +1610,13 @@ def get_raw(self, sha_id): pass raise KeyError(sha_id) - def contains_packed(self, sha): + def contains_packed(self, sha) -> bool: for b in self.bases: if b.contains_packed(sha): return True return False - def contains_loose(self, sha): + def contains_loose(self, sha) -> bool: for b in self.bases: if b.contains_loose(sha): return True @@ -1641,20 +1641,20 @@ def _iter_loose_objects(self): """Iterate over the SHAs of all loose objects.""" return iter([]) - def _get_loose_object(self, sha): + def _get_loose_object(self, sha) -> None: return None - def _remove_loose_object(self, sha): + def _remove_loose_object(self, sha) -> None: # Doesn't exist.. 
pass - def _remove_pack(self, name): + def _remove_pack(self, name) -> None: raise NotImplementedError(self._remove_pack) - def _iter_pack_names(self): + def _iter_pack_names(self) -> Iterator[str]: raise NotImplementedError(self._iter_pack_names) - def _get_pack(self, name): + def _get_pack(self, name) -> Pack: raise NotImplementedError(self._get_pack) def _update_pack_cache(self): @@ -1672,7 +1672,7 @@ def _update_pack_cache(self): self._pack_cache.pop(f).close() return new_packs - def _upload_pack(self, basename, pack_file, index_file): + def _upload_pack(self, basename, pack_file, index_file) -> None: raise NotImplementedError def add_pack(self): diff --git a/dulwich/objects.py b/dulwich/objects.py index 03426eda1..041dc0eb5 100644 --- a/dulwich/objects.py +++ b/dulwich/objects.py @@ -120,7 +120,7 @@ def hex_to_sha(hex): raise ValueError(exc.args[0]) from exc -def valid_hexsha(hex): +def valid_hexsha(hex) -> bool: if len(hex) != 40: return False try: @@ -168,7 +168,7 @@ def object_header(num_type: int, length: int) -> bytes: def serializable_property(name: str, docstring: Optional[str] = None): """A property that helps tracking whether serialization is necessary.""" - def set(obj, value): + def set(obj, value) -> None: setattr(obj, "_" + name, value) obj._needs_serialization = True @@ -189,7 +189,7 @@ def object_class(type: Union[bytes, int]) -> Optional[type["ShaFile"]]: return _TYPE_MAP.get(type, None) -def check_hexsha(hex, error_msg): +def check_hexsha(hex, error_msg) -> None: """Check if a string is a valid hex sha string. Args: @@ -202,7 +202,7 @@ def check_hexsha(hex, error_msg): raise ObjectFormatException(f"{error_msg} {hex}") -def check_identity(identity: bytes, error_msg: str) -> None: +def check_identity(identity: Optional[bytes], error_msg: str) -> None: """Check if the specified identity is valid. This will raise an exception if the identity is not valid. @@ -211,6 +211,8 @@ def check_identity(identity: bytes, error_msg: str) -> None: identity: Identity string error_msg: Error message to use in exception """ + if identity is None: + raise ObjectFormatException(error_msg) email_start = identity.find(b"<") email_end = identity.find(b">") if not all( @@ -226,7 +228,7 @@ def check_identity(identity: bytes, error_msg: str) -> None: raise ObjectFormatException(error_msg) -def check_time(time_seconds): +def check_time(time_seconds) -> None: """Check if the specified time is not prone to overflow error. This will raise an exception if the time is not valid. @@ -491,7 +493,7 @@ def from_string(cls, string): obj.set_raw_string(string) return obj - def _check_has_member(self, member, error_msg): + def _check_has_member(self, member, error_msg) -> None: """Check that the object has a given member variable. Args: @@ -598,7 +600,7 @@ def __init__(self) -> None: def _get_data(self): return self.as_raw_string() - def _set_data(self, data): + def _set_data(self, data) -> None: self.set_raw_string(data) data = property( @@ -608,13 +610,13 @@ def _set_data(self, data): def _get_chunked(self): return self._chunked_text - def _set_chunked(self, chunks: list[bytes]): + def _set_chunked(self, chunks: list[bytes]) -> None: self._chunked_text = chunks def _serialize(self): return self._chunked_text - def _deserialize(self, chunks): + def _deserialize(self, chunks) -> None: self._chunked_text = chunks chunked = property( @@ -630,7 +632,7 @@ def from_path(cls, path): raise NotBlobError(path) return blob - def check(self): + def check(self) -> None: """Check this object for internal consistency. 
Raises: @@ -759,7 +761,7 @@ def __init__(self) -> None: self._tag_time = None self._tag_timezone = None self._tag_timezone_neg_utc = False - self._signature = None + self._signature: Optional[bytes] = None @classmethod def from_path(cls, filename): @@ -768,7 +770,7 @@ def from_path(cls, filename): raise NotTagError(filename) return tag - def check(self): + def check(self) -> None: """Check this object for internal consistency. Raises: @@ -829,7 +831,7 @@ def _serialize(self): body = (self.message or b"") + (self._signature or b"") return list(_format_message(headers, body)) - def _deserialize(self, chunks): + def _deserialize(self, chunks) -> None: """Grab the metadata attached to the tag.""" self._tagger = None self._tag_time = None @@ -866,7 +868,7 @@ def _deserialize(self, chunks): self._message = value[:sig_idx] self._signature = value[sig_idx:] else: - raise ObjectFormatException(f"Unknown field {field}") + raise ObjectFormatException(f"Unknown field {field.decode('ascii', 'replace')}") def _get_object(self): """Get the object pointed to by this tag. @@ -875,7 +877,7 @@ def _get_object(self): """ return (self._object_class, self._object_sha) - def _set_object(self, value): + def _set_object(self, value) -> None: (self._object_class, self._object_sha) = value self._needs_serialization = True @@ -897,7 +899,7 @@ def _set_object(self, value): signature = serializable_property("signature", "Optional detached GPG signature") - def sign(self, keyid: Optional[str] = None): + def sign(self, keyid: Optional[str] = None) -> None: import gpg with gpg.Context(armor=True) as c: @@ -1118,7 +1120,7 @@ def __len__(self) -> int: def __iter__(self): return iter(self._entries) - def add(self, name, mode, hexsha): + def add(self, name, mode, hexsha) -> None: """Add an entry to the tree. Args: @@ -1147,7 +1149,7 @@ def items(self) -> list[TreeEntry]: """ return list(self.iteritems()) - def _deserialize(self, chunks): + def _deserialize(self, chunks) -> None: """Grab the entries in the tree.""" try: parsed_entries = parse_tree(b"".join(chunks)) @@ -1158,7 +1160,7 @@ def _deserialize(self, chunks): # genexp. self._entries = {n: (m, s) for n, m, s in parsed_entries} - def check(self): + def check(self) -> None: """Check this object for internal consistency. Raises: @@ -1385,12 +1387,12 @@ class Commit(ShaFile): def __init__(self) -> None: super().__init__() self._parents: list[bytes] = [] - self._encoding = None + self._encoding: Optional[bytes] = None self._mergetag: list[Tag] = [] - self._gpgsig = None - self._extra: list[tuple[bytes, bytes]] = [] - self._author_timezone_neg_utc = False - self._commit_timezone_neg_utc = False + self._gpgsig: Optional[bytes] = None + self._extra: list[tuple[bytes, Optional[bytes]]] = [] + self._author_timezone_neg_utc: Optional[bool] = False + self._commit_timezone_neg_utc: Optional[bool] = False @classmethod def from_path(cls, path): @@ -1399,7 +1401,7 @@ def from_path(cls, path): raise NotCommitError(path) return commit - def _deserialize(self, chunks): + def _deserialize(self, chunks) -> None: self._parents = [] self._extra = [] self._tree = None @@ -1444,7 +1446,7 @@ def _deserialize(self, chunks): (self._commit_timezone, self._commit_timezone_neg_utc), ) = commit_info - def check(self): + def check(self) -> None: """Check this object for internal consistency. 
Raises: @@ -1490,7 +1492,7 @@ def check(self): # TODO: optionally check for duplicate parents - def sign(self, keyid: Optional[str] = None): + def sign(self, keyid: Optional[str] = None) -> None: import gpg with gpg.Context(armor=True) as c: @@ -1506,7 +1508,7 @@ def sign(self, keyid: Optional[str] = None): self.as_raw_string(), mode=gpg.constants.sig.mode.DETACH ) - def verify(self, keyids: Optional[Iterable[str]] = None): + def verify(self, keyids: Optional[Iterable[str]] = None) -> None: """Verify GPG signature for this commit (if it is signed). Args: @@ -1583,7 +1585,7 @@ def _get_parents(self): """Return a list of parents of this commit.""" return self._parents - def _set_parents(self, value): + def _set_parents(self, value) -> None: """Set a list of parents of this commit.""" self._needs_serialization = True self._parents = value diff --git a/dulwich/pack.py b/dulwich/pack.py index c133ea98c..fc810250a 100644 --- a/dulwich/pack.py +++ b/dulwich/pack.py @@ -99,7 +99,7 @@ class UnresolvedDeltas(Exception): """Delta objects could not be resolved.""" - def __init__(self, shas): + def __init__(self, shas) -> None: self.shas = shas @@ -525,7 +525,7 @@ def _itersha(self) -> Iterator[bytes]: """Yield all the SHA1's of the objects in the index, sorted.""" raise NotImplementedError(self._itersha) - def close(self): + def close(self) -> None: pass def check(self) -> None: @@ -642,15 +642,15 @@ def _unpack_entry(self, i: int) -> PackIndexEntry: """ raise NotImplementedError(self._unpack_entry) - def _unpack_name(self, i): + def _unpack_name(self, i) -> bytes: """Unpack the i-th name from the index file.""" raise NotImplementedError(self._unpack_name) - def _unpack_offset(self, i): + def _unpack_offset(self, i) -> int: """Unpack the i-th object offset from the index file.""" raise NotImplementedError(self._unpack_offset) - def _unpack_crc32_checksum(self, i): + def _unpack_crc32_checksum(self, i) -> Optional[int]: """Unpack the crc32 checksum for the ith object from the index file.""" raise NotImplementedError(self._unpack_crc32_checksum) @@ -754,7 +754,7 @@ def iter_prefix(self, prefix: bytes) -> Iterator[bytes]: assert start <= end started = False for i in range(start, end): - name = self._unpack_name(i) + name: bytes = self._unpack_name(i) if name.startswith(prefix): yield name started = True @@ -782,7 +782,7 @@ def _unpack_offset(self, i): offset = (0x100 * 4) + (i * 24) return unpack_from(">L", self._contents, offset)[0] - def _unpack_crc32_checksum(self, i): + def _unpack_crc32_checksum(self, i) -> None: # Not stored in v1 index files return None @@ -1109,7 +1109,7 @@ def _read(self, read, size): self.outfile.write(data) return data - def verify(self, progress=None): + def verify(self, progress=None) -> None: """Verify a pack stream and write it to the output file. 
See PackStreamReader.iterobjects for a list of exceptions this may @@ -1231,7 +1231,7 @@ def from_file(cls, file, size=None): def from_path(cls, path): return cls(filename=path) - def close(self): + def close(self) -> None: self._file.close() def __enter__(self): @@ -1370,7 +1370,7 @@ def get_stored_checksum(self): self._file.seek(-20, SEEK_END) return self._file.read(20) - def check(self): + def check(self) -> None: """Check the consistency of this pack.""" actual = self.calculate_checksum() stored = self.get_stored_checksum() @@ -1611,7 +1611,7 @@ def read(self, num=None): self.sha1.update(data) return data - def check_sha(self): + def check_sha(self) -> None: stored = self.f.read(20) if stored != self.sha1.digest(): raise ChecksumMismatch(self.sha1.hexdigest(), sha_to_hex(stored)) @@ -1631,7 +1631,7 @@ def __init__(self, f) -> None: self.length = 0 self.sha1 = sha1(b"") - def write(self, data): + def write(self, data) -> None: self.sha1.update(data) self.f.write(data) self.length += len(data) @@ -1766,7 +1766,7 @@ def pack_header_chunks(num_objects): yield struct.pack(b">L", num_objects) # Number of objects in pack -def write_pack_header(write, num_objects): +def write_pack_header(write, num_objects) -> None: """Write a pack header for the given number of objects.""" if hasattr(write, "write"): write = write.write @@ -2470,7 +2470,7 @@ def index(self) -> PackIndex: self._idx = self._idx_load() return self._idx - def close(self): + def close(self) -> None: if self._data is not None: self._data.close() if self._idx is not None: diff --git a/dulwich/patch.py b/dulwich/patch.py index ae74615a9..f24c4db8f 100644 --- a/dulwich/patch.py +++ b/dulwich/patch.py @@ -35,7 +35,7 @@ FIRST_FEW_BYTES = 8000 -def write_commit_patch(f, commit, contents, progress, version=None, encoding=None): +def write_commit_patch(f, commit, contents, progress, version=None, encoding=None) -> None: """Write a individual file patch. Args: @@ -101,7 +101,7 @@ def get_summary(commit): # Unified Diff -def _format_range_unified(start, stop): +def _format_range_unified(start, stop) -> str: """Convert range to the "ed" format.""" # Per the diff spec at http://www.unix.org/single_unix_specification/ beginning = start + 1 # lines start numbering with one @@ -188,7 +188,7 @@ def patch_filename(p, root): return root + b"/" + p -def write_object_diff(f, store: ObjectContainer, old_file, new_file, diff_binary=False): +def write_object_diff(f, store: ObjectContainer, old_file, new_file, diff_binary=False) -> None: """Write the diff for an object. Args: @@ -279,7 +279,7 @@ def gen_diff_header(paths, modes, shas): # TODO(jelmer): Support writing unicode, rather than bytes. -def write_blob_diff(f, old_file, new_file): +def write_blob_diff(f, old_file, new_file) -> None: """Write blob diff. Args: @@ -314,7 +314,7 @@ def lines(blob): ) -def write_tree_diff(f, store, old_tree, new_tree, diff_binary=False): +def write_tree_diff(f, store, old_tree, new_tree, diff_binary=False) -> None: """Write tree diff. 
Args: diff --git a/dulwich/porcelain.py b/dulwich/porcelain.py index d76e28e1b..44a817ff8 100644 --- a/dulwich/porcelain.py +++ b/dulwich/porcelain.py @@ -140,16 +140,16 @@ class NoneStream(RawIOBase): """Fallback if stdout or stderr are unavailable, does nothing.""" - def read(self, size=-1): + def read(self, size=-1) -> None: return None - def readall(self): - return None + def readall(self) -> bytes: + return b"" - def readinto(self, b): + def readinto(self, b) -> None: return None - def write(self, b): + def write(self, b) -> None: return None @@ -325,7 +325,7 @@ def __init__(self, current_sha, new_sha) -> None: self.new_sha = new_sha -def check_diverged(repo, current_sha, new_sha): +def check_diverged(repo, current_sha, new_sha) -> None: """Check if updating to a sha can be done with fast forwarding. Args: @@ -346,7 +346,7 @@ def archive( committish=None, outstream=default_bytes_out_stream, errstream=default_bytes_err_stream, -): +) -> None: """Create an archive. Args: @@ -365,7 +365,7 @@ def archive( outstream.write(chunk) -def update_server_info(repo="."): +def update_server_info(repo=".") -> None: """Update server info files for a repository. Args: @@ -375,7 +375,7 @@ def update_server_info(repo="."): server_update_server_info(r) -def symbolic_ref(repo, ref_name, force=False): +def symbolic_ref(repo, ref_name, force=False) -> None: """Set git symbolic ref into HEAD. Args: @@ -390,7 +390,7 @@ def symbolic_ref(repo, ref_name, force=False): repo_obj.refs.set_symbolic_ref(b"HEAD", ref_path) -def pack_refs(repo, all=False): +def pack_refs(repo, all=False) -> None: with open_repo_closing(repo) as repo_obj: refs = repo_obj.refs packed_refs = { @@ -623,7 +623,7 @@ def _is_subdir(subdir, parentdir): # TODO: option to remove ignored files also, in line with `git clean -fdx` -def clean(repo=".", target_dir=None): +def clean(repo=".", target_dir=None) -> None: """Remove any untracked files from the target directory recursively. Equivalent to running ``git clean -fd`` in target_dir. @@ -669,7 +669,7 @@ def clean(repo=".", target_dir=None): os.remove(ap) -def remove(repo=".", paths=None, cached=False): +def remove(repo=".", paths=None, cached=False) -> None: """Remove files from the staging area. Args: @@ -736,7 +736,7 @@ def commit_encode(commit, contents, default_encoding=DEFAULT_ENCODING): return contents.encode(encoding) -def print_commit(commit, decode, outstream=sys.stdout): +def print_commit(commit, decode, outstream=sys.stdout) -> None: """Write a human-readable commit log entry. Args: @@ -764,7 +764,7 @@ def print_commit(commit, decode, outstream=sys.stdout): outstream.write("\n") -def print_tag(tag, decode, outstream=sys.stdout): +def print_tag(tag, decode, outstream=sys.stdout) -> None: """Write a human-readable tag. Args: @@ -782,7 +782,7 @@ def print_tag(tag, decode, outstream=sys.stdout): outstream.write("\n") -def show_blob(repo, blob, decode, outstream=sys.stdout): +def show_blob(repo, blob, decode, outstream=sys.stdout) -> None: """Write a blob to a stream. Args: @@ -794,7 +794,7 @@ def show_blob(repo, blob, decode, outstream=sys.stdout): outstream.write(decode(blob.data)) -def show_commit(repo, commit, decode, outstream=sys.stdout): +def show_commit(repo, commit, decode, outstream=sys.stdout) -> None: """Show a commit to a stream. 
Args: @@ -815,7 +815,7 @@ def show_commit(repo, commit, decode, outstream=sys.stdout): outstream.write(commit_decode(commit, diffstream.getvalue())) -def show_tree(repo, tree, decode, outstream=sys.stdout): +def show_tree(repo, tree, decode, outstream=sys.stdout) -> None: """Print a tree to a stream. Args: @@ -828,7 +828,7 @@ def show_tree(repo, tree, decode, outstream=sys.stdout): outstream.write(decode(n) + "\n") -def show_tag(repo, tag, decode, outstream=sys.stdout): +def show_tag(repo, tag, decode, outstream=sys.stdout) -> None: """Print a tag to a stream. Args: @@ -886,7 +886,7 @@ def log( max_entries=None, reverse=False, name_status=False, -): +) -> None: """Write commit logs. Args: @@ -917,7 +917,7 @@ def show( objects=None, outstream=sys.stdout, default_encoding=DEFAULT_ENCODING, -): +) -> None: """Print the changes in a commit. Args: @@ -947,7 +947,7 @@ def decode(x): show_object(r, o, decode, outstream) -def diff_tree(repo, old_tree, new_tree, outstream=default_bytes_out_stream): +def diff_tree(repo, old_tree, new_tree, outstream=default_bytes_out_stream) -> None: """Compares the content and mode of blobs found via two tree objects. Args: @@ -960,7 +960,7 @@ def diff_tree(repo, old_tree, new_tree, outstream=default_bytes_out_stream): write_tree_diff(outstream, r.object_store, old_tree, new_tree) -def rev_list(repo, commits, outstream=sys.stdout): +def rev_list(repo, commits, outstream=sys.stdout) -> None: """Lists commit objects in reverse chronological order. Args: @@ -980,7 +980,7 @@ def _canonical_part(url: str) -> str: return name -def submodule_add(repo, url, path=None, name=None): +def submodule_add(repo, url, path=None, name=None) -> None: """Add a new submodule. Args: @@ -1006,7 +1006,7 @@ def submodule_add(repo, url, path=None, name=None): config.write_to_path() -def submodule_init(repo): +def submodule_init(repo) -> None: """Initialize submodules. Args: @@ -1045,7 +1045,7 @@ def tag_create( tag_timezone=None, sign=False, encoding=DEFAULT_ENCODING, -): +) -> None: """Creates a tag in git via dulwich calls. Args: @@ -1104,7 +1104,7 @@ def tag_list(repo, outstream=sys.stdout): return tags -def tag_delete(repo, name): +def tag_delete(repo, name) -> None: """Remove a tag. Args: @@ -1122,7 +1122,7 @@ def tag_delete(repo, name): del r.refs[_make_tag_ref(name)] -def reset(repo, mode, treeish="HEAD"): +def reset(repo, mode, treeish="HEAD") -> None: """Reset current HEAD to the specified state. Args: @@ -1170,7 +1170,7 @@ def push( errstream=default_bytes_err_stream, force=False, **kwargs, -): +) -> None: """Remote push with dulwich via dulwich.client. Args: @@ -1255,7 +1255,7 @@ def pull( filter_spec=None, protocol_version=None, **kwargs, -): +) -> None: """Pull from remote via dulwich.client. Args: @@ -1280,7 +1280,7 @@ def pull( if refspecs is None: refspecs = [b"HEAD"] - def determine_wants(remote_refs, **kwargs): + def determine_wants(remote_refs, *args, **kwargs): selected_refs.extend( parse_reftuples(remote_refs, r.refs, refspecs, force=force) ) @@ -1494,7 +1494,7 @@ def get_tree_changes(repo): return tracked_changes -def daemon(path=".", address=None, port=None): +def daemon(path=".", address=None, port=None) -> None: """Run a daemon serving Git requests over TCP/IP. Args: @@ -1508,7 +1508,7 @@ def daemon(path=".", address=None, port=None): server.serve_forever() -def web_daemon(path=".", address=None, port=None): +def web_daemon(path=".", address=None, port=None) -> None: """Run a daemon serving Git requests over HTTP. 
Args: @@ -1535,7 +1535,7 @@ def web_daemon(path=".", address=None, port=None): server.serve_forever() -def upload_pack(path=".", inf=None, outf=None): +def upload_pack(path=".", inf=None, outf=None) -> int: """Upload a pack file after negotiating its contents using smart protocol. Args: @@ -1550,7 +1550,7 @@ def upload_pack(path=".", inf=None, outf=None): path = os.path.expanduser(path) backend = FileSystemBackend(path) - def send_fn(data): + def send_fn(data) -> None: outf.write(data) outf.flush() @@ -1561,7 +1561,7 @@ def send_fn(data): return 0 -def receive_pack(path=".", inf=None, outf=None): +def receive_pack(path=".", inf=None, outf=None) -> int: """Receive a pack file after negotiating its contents using smart protocol. Args: @@ -1576,7 +1576,7 @@ def receive_pack(path=".", inf=None, outf=None): path = os.path.expanduser(path) backend = FileSystemBackend(path) - def send_fn(data): + def send_fn(data) -> None: outf.write(data) outf.flush() @@ -1599,7 +1599,7 @@ def _make_tag_ref(name): return LOCAL_TAG_PREFIX + name -def branch_delete(repo, name): +def branch_delete(repo, name) -> None: """Delete a branch. Args: @@ -1615,7 +1615,7 @@ def branch_delete(repo, name): del r.refs[_make_branch_ref(name)] -def branch_create(repo, name, objectish=None, force=False): +def branch_create(repo, name, objectish=None, force=False) -> None: """Create a branch. Args: @@ -1800,7 +1800,7 @@ def ls_remote(remote, config: Optional[Config] = None, **kwargs): return client.get_refs(host_path) -def repack(repo): +def repack(repo) -> None: """Repack loose files in a repository. Currently this only packs loose objects. @@ -1820,7 +1820,7 @@ def pack_objects( delta_window_size=None, deltify=None, reuse_deltas=True, -): +) -> None: """Pack objects into a file. Args: @@ -1853,7 +1853,7 @@ def ls_tree( outstream=sys.stdout, recursive=False, name_only=False, -): +) -> None: """List contents of a tree. Args: @@ -1864,7 +1864,7 @@ def ls_tree( name_only: Only print item name """ - def list_tree(store, treeid, base): + def list_tree(store, treeid, base) -> None: for name, mode, sha in store[treeid].iteritems(): if base: name = posixpath.join(base, name) @@ -1880,7 +1880,7 @@ def list_tree(store, treeid, base): list_tree(r.object_store, tree.id, "") -def remote_add(repo: Repo, name: Union[bytes, str], url: Union[bytes, str]): +def remote_add(repo, name: Union[bytes, str], url: Union[bytes, str]) -> None: """Add a remote. Args: @@ -1901,7 +1901,7 @@ def remote_add(repo: Repo, name: Union[bytes, str], url: Union[bytes, str]): c.write_to_path() -def remote_remove(repo: Repo, name: Union[bytes, str]): +def remote_remove(repo: Repo, name: Union[bytes, str]) -> None: """Remove a remote. Args: @@ -1938,7 +1938,7 @@ def check_ignore(repo, paths, no_index=False): yield path -def update_head(repo, target, detached=False, new_branch=None): +def update_head(repo, target, detached=False, new_branch=None) -> None: """Update HEAD to point at a new branch/commit. Note that this does not actually update the working tree. @@ -1966,7 +1966,7 @@ def update_head(repo, target, detached=False, new_branch=None): r.refs.set_symbolic_ref(b"HEAD", to_set) -def reset_file(repo, file_path: str, target: bytes = b"HEAD", symlink_fn=None): +def reset_file(repo, file_path: str, target: bytes = b"HEAD", symlink_fn=None) -> None: """Reset the file to specific commit or branch. 
Args: @@ -2010,7 +2010,7 @@ def _update_head_during_checkout_branch(repo, target): return checkout_target -def checkout_branch(repo, target: Union[bytes, str], force: bool = False): +def checkout_branch(repo, target: Union[bytes, str], force: bool = False) -> None: """Switch branches or restore working tree files. The implementation of this function will probably not scale well @@ -2143,7 +2143,7 @@ def stash_list(repo): return enumerate(list(stash.stashes())) -def stash_push(repo): +def stash_push(repo) -> None: """Push a new stash onto the stack.""" with open_repo_closing(repo) as r: from .stash import Stash @@ -2152,16 +2152,16 @@ def stash_push(repo): stash.push() -def stash_pop(repo, index): +def stash_pop(repo) -> None: """Pop a stash from the stack.""" with open_repo_closing(repo) as r: from .stash import Stash stash = Stash.from_repo(r) - stash.pop(index) + stash.pop() -def stash_drop(repo, index): +def stash_drop(repo, index) -> None: """Drop a stash from the stack.""" with open_repo_closing(repo) as r: from .stash import Stash diff --git a/dulwich/protocol.py b/dulwich/protocol.py index c0128f158..4fc1fc612 100644 --- a/dulwich/protocol.py +++ b/dulwich/protocol.py @@ -23,6 +23,7 @@ from io import BytesIO from os import SEEK_END +from typing import Optional import dulwich @@ -221,9 +222,9 @@ def __init__(self, read, write, close=None, report_activity=None) -> None: self.write = write self._close = close self.report_activity = report_activity - self._readahead = None + self._readahead: Optional[BytesIO] = None - def close(self): + def close(self) -> None: if self._close: self._close() @@ -270,7 +271,7 @@ def read_pkt_line(self): ) return pkt_contents - def eof(self): + def eof(self) -> bool: """Test whether the protocol stream has reached EOF. Note that this refers to the actual stream EOF and not just a @@ -285,7 +286,7 @@ def eof(self): self.unread_pkt_line(next_line) return False - def unread_pkt_line(self, data): + def unread_pkt_line(self, data) -> None: """Unread a single line of data into the readahead buffer. This method can be used to unread a single pkt-line into a fixed @@ -312,7 +313,7 @@ def read_pkt_seq(self): yield pkt pkt = self.read_pkt_line() - def write_pkt_line(self, line): + def write_pkt_line(self, line) -> None: """Sends a pkt-line to the remote git process. Args: @@ -327,7 +328,7 @@ def write_pkt_line(self, line): except OSError as exc: raise GitProtocolError(str(exc)) from exc - def write_sideband(self, channel, blob): + def write_sideband(self, channel, blob) -> None: """Write multiplexed data to the sideband. Args: @@ -341,7 +342,7 @@ def write_sideband(self, channel, blob): self.write_pkt_line(bytes(bytearray([channel])) + blob[:65515]) blob = blob[65515:] - def send_cmd(self, cmd, *args): + def send_cmd(self, cmd, *args) -> None: """Send a command and some arguments to a git server. Only used for the TCP git protocol (git://). 
@@ -531,7 +532,7 @@ def __init__(self, write, bufsize=65515) -> None: self._wbuf = BytesIO() self._buflen = 0 - def write(self, data): + def write(self, data) -> None: """Write data, wrapping it in a pkt-line.""" line = pkt_line(data) line_len = len(line) @@ -546,7 +547,7 @@ def write(self, data): self._wbuf.write(saved) self._buflen += len(saved) - def flush(self): + def flush(self) -> None: """Flush all data from the buffer.""" data = self._wbuf.getvalue() if data: @@ -562,7 +563,7 @@ def __init__(self, handle_pkt) -> None: self.handle_pkt = handle_pkt self._readahead = BytesIO() - def parse(self, data): + def parse(self, data) -> None: """Parse a fragment of data and call back for any completed packets.""" self._readahead.write(data) buf = self._readahead.getvalue() diff --git a/dulwich/reflog.py b/dulwich/reflog.py index 9bea44857..973da34aa 100644 --- a/dulwich/reflog.py +++ b/dulwich/reflog.py @@ -90,7 +90,7 @@ def read_reflog(f): yield parse_reflog_line(line) -def drop_reflog_entry(f, index, rewrite=False): +def drop_reflog_entry(f, index, rewrite=False) -> None: """Drop the specified reflog entry. Args: diff --git a/dulwich/refs.py b/dulwich/refs.py index e6c00adf3..d98b789b8 100644 --- a/dulwich/refs.py +++ b/dulwich/refs.py @@ -24,7 +24,7 @@ import os import warnings from contextlib import suppress -from typing import Any, Optional +from typing import Any, Optional, Iterator from .errors import PackedRefsException, RefFormatError from .file import GitFile, ensure_dir_exists @@ -65,7 +65,7 @@ def parse_symref_value(contents: bytes) -> bytes: raise ValueError(contents) -def check_ref_format(refname: Ref): +def check_ref_format(refname: Ref) -> bool: """Check if a refname is correctly formatted. Implements all the same rules as git-check-ref-format[1]. @@ -114,7 +114,7 @@ def _log( timestamp=None, timezone=None, message=None, - ): + ) -> None: if self._logger is None: return if message is None: @@ -129,7 +129,7 @@ def set_symbolic_ref( timestamp=None, timezone=None, message=None, - ): + ) -> None: """Make a ref point at another ref. Args: @@ -139,7 +139,7 @@ def set_symbolic_ref( """ raise NotImplementedError(self.set_symbolic_ref) - def get_packed_refs(self): + def get_packed_refs(self) -> dict[Ref, ObjectID]: """Get contents of the packed-refs file. Returns: Dictionary mapping ref names to SHA1s @@ -149,7 +149,7 @@ def get_packed_refs(self): """ raise NotImplementedError(self.get_packed_refs) - def add_packed_refs(self, new_refs: dict[Ref, Optional[ObjectID]]): + def add_packed_refs(self, new_refs: dict[Ref, Optional[ObjectID]]) -> None: """Add the given refs as packed refs. Args: @@ -158,7 +158,7 @@ def add_packed_refs(self, new_refs: dict[Ref, Optional[ObjectID]]): """ raise NotImplementedError(self.add_packed_refs) - def get_peeled(self, name): + def get_peeled(self, name) -> Optional[ObjectID]: """Return the cached peeled value of a ref, if available. 
Args: @@ -178,7 +178,7 @@ def import_refs( timezone: Optional[bytes] = None, message: Optional[bytes] = None, prune: bool = False, - ): + ) -> None: if prune: to_delete = set(self.subkeys(base)) else: @@ -198,7 +198,7 @@ def import_refs( for ref in to_delete: self.remove_if_equals(b"/".join((base, ref)), None, message=message) - def allkeys(self): + def allkeys(self) -> Iterator[Ref]: """All refs present in this container.""" raise NotImplementedError(self.allkeys) @@ -249,7 +249,7 @@ def as_dict(self, base=None) -> dict[Ref, ObjectID]: return ret - def _check_refname(self, name): + def _check_refname(self, name) -> None: """Ensure a refname is valid and lives in refs or is HEAD. HEAD is not a valid refname according to git-check-ref-format, but this @@ -281,7 +281,7 @@ def read_ref(self, refname): contents = self.get_packed_refs().get(refname, None) return contents - def read_loose_ref(self, name): + def read_loose_ref(self, name) -> bytes: """Read a loose reference and return its contents. Args: @@ -335,7 +335,7 @@ def set_if_equals( timestamp=None, timezone=None, message=None, - ): + ) -> bool: """Set a refname to new_ref only if it currently equals old_ref. This method follows all symbolic references if applicable for the @@ -354,7 +354,7 @@ def set_if_equals( def add_if_new( self, name, ref, committer=None, timestamp=None, timezone=None, message=None - ): + ) -> bool: """Add a new reference only if it does not already exist. Args: @@ -389,7 +389,7 @@ def remove_if_equals( timestamp=None, timezone=None, message=None, - ): + ) -> bool: """Remove a refname only if it currently equals old_ref. This method does not follow symbolic references, even if applicable for @@ -458,7 +458,7 @@ def read_loose_ref(self, name): def get_packed_refs(self): return {} - def _notify(self, ref, newsha): + def _notify(self, ref, newsha) -> None: for watcher in self._watchers: watcher._notify((ref, newsha)) @@ -470,7 +470,7 @@ def set_symbolic_ref( timestamp=None, timezone=None, message=None, - ): + ) -> None: old = self.follow(name)[-1] new = SYMREF + other self._refs[name] = new @@ -494,7 +494,7 @@ def set_if_equals( timestamp=None, timezone=None, message=None, - ): + ) -> bool: if old_ref is not None and self._refs.get(name, ZERO_SHA) != old_ref: return False realnames, _ = self.follow(name) @@ -522,7 +522,7 @@ def add_if_new( timestamp=None, timezone=None, message: Optional[bytes] = None, - ): + ) -> bool: if name in self._refs: return False self._refs[name] = ref @@ -546,7 +546,7 @@ def remove_if_equals( timestamp=None, timezone=None, message=None, - ): + ) -> bool: if old_ref is not None and self._refs.get(name, ZERO_SHA) != old_ref: return False try: @@ -569,14 +569,14 @@ def remove_if_equals( def get_peeled(self, name): return self._peeled.get(name) - def _update(self, refs): + def _update(self, refs) -> None: """Update multiple refs; intended only for testing.""" # TODO(dborowitz): replace this with a public function that uses # set_if_equal. for ref, sha in refs.items(): self.set_if_equals(ref, None, sha) - def _update_peeled(self, peeled): + def _update_peeled(self, peeled) -> None: """Update cached peeled refs; intended only for testing.""" self._peeled.update(peeled) @@ -704,7 +704,7 @@ def get_packed_refs(self): self._packed_refs[name] = sha return self._packed_refs - def add_packed_refs(self, new_refs: dict[Ref, Optional[ObjectID]]): + def add_packed_refs(self, new_refs: dict[Ref, Optional[ObjectID]]) -> None: """Add the given refs as packed refs. 
Args: @@ -789,7 +789,7 @@ def read_loose_ref(self, name): # errors depending on the specific operating system return None - def _remove_packed_ref(self, name): + def _remove_packed_ref(self, name) -> None: if self._packed_refs is None: return filename = os.path.join(self.path, b"packed-refs") @@ -818,7 +818,7 @@ def set_symbolic_ref( timestamp=None, timezone=None, message=None, - ): + ) -> None: """Make a ref point at another ref. Args: @@ -857,7 +857,7 @@ def set_if_equals( timestamp=None, timezone=None, message=None, - ): + ) -> bool: """Set a refname to new_ref only if it currently equals old_ref. This method follows all symbolic references, and can be used to perform @@ -925,7 +925,7 @@ def add_if_new( timestamp=None, timezone=None, message: Optional[bytes] = None, - ): + ) -> bool: """Add a new reference only if it does not already exist. This method follows symrefs, and only ensures that the last ref in the @@ -976,7 +976,7 @@ def remove_if_equals( timestamp=None, timezone=None, message=None, - ): + ) -> bool: """Remove a refname only if it currently equals old_ref. This method does not follow symbolic references. It can be used to @@ -1111,7 +1111,7 @@ def read_packed_refs_with_peeled(f): yield (sha, name, None) -def write_packed_refs(f, packed_refs, peeled_refs=None): +def write_packed_refs(f, packed_refs, peeled_refs=None) -> None: """Write a packed refs file. Args: @@ -1180,7 +1180,7 @@ def split_peeled_refs(refs): return regular, peeled -def _set_origin_head(refs, origin, origin_head): +def _set_origin_head(refs, origin, origin_head) -> None: # set refs/remotes/origin/HEAD origin_base = b"refs/remotes/" + origin + b"/" if origin_head and origin_head.startswith(LOCAL_BRANCH_PREFIX): @@ -1251,7 +1251,7 @@ def _import_remote_refs( message: Optional[bytes] = None, prune: bool = False, prune_tags: bool = False, -): +) -> None: stripped_refs = strip_peeled_refs(refs) branches = { n[len(LOCAL_BRANCH_PREFIX) :]: v diff --git a/dulwich/repo.py b/dulwich/repo.py index 819080a9f..f856db320 100644 --- a/dulwich/repo.py +++ b/dulwich/repo.py @@ -232,7 +232,7 @@ def get_user_identity(config: "StackedConfig", kind: Optional[str] = None) -> by return user + b" <" + email + b">" -def check_user_identity(identity): +def check_user_identity(identity) -> None: """Verify that a user identity is formatted correctly. Args: @@ -304,7 +304,7 @@ def serialize_graftpoints(graftpoints: dict[bytes, list[bytes]]) -> bytes: return b"\n".join(graft_lines) -def _set_filesystem_hidden(path): +def _set_filesystem_hidden(path) -> None: """Mark path as to be hidden if supported by platform and filesystem. On win32 uses SetFileAttributesW api: @@ -427,7 +427,7 @@ def get_named_file(self, path: str) -> Optional[BinaryIO]: """ raise NotImplementedError(self.get_named_file) - def _put_named_file(self, path: str, contents: bytes): + def _put_named_file(self, path: str, contents: bytes) -> None: """Write a file to the control dir with the given name and contents. 
Args: @@ -436,7 +436,7 @@ def _put_named_file(self, path: str, contents: bytes): """ raise NotImplementedError(self._put_named_file) - def _del_named_file(self, path: str): + def _del_named_file(self, path: str) -> None: """Delete a file in the control directory with the given name.""" raise NotImplementedError(self._del_named_file) @@ -549,7 +549,7 @@ def find_missing_objects( return None class DummyMissingObjectFinder: - def get_remote_has(self): + def get_remote_has(self) -> None: return None def __len__(self) -> int: @@ -702,7 +702,7 @@ def get_worktree_config(self) -> "ConfigFile": """Retrieve the worktree config object.""" raise NotImplementedError(self.get_worktree_config) - def get_description(self): + def get_description(self) -> Optional[str]: """Retrieve the description for this repository. Returns: String with the description of the repository @@ -710,7 +710,7 @@ def get_description(self): """ raise NotImplementedError(self.get_description) - def set_description(self, description): + def set_description(self, description) -> None: """Set the description for this repository. Args: @@ -748,7 +748,7 @@ def get_shallow(self) -> set[ObjectID]: with f: return {line.strip() for line in f} - def update_shallow(self, new_shallow, new_unshallow): + def update_shallow(self, new_shallow, new_unshallow) -> None: """Update the list of shallow objects. Args: @@ -884,7 +884,7 @@ def _get_user_identity( ) return get_user_identity(config) - def _add_graftpoints(self, updated_graftpoints: dict[bytes, list[bytes]]): + def _add_graftpoints(self, updated_graftpoints: dict[bytes, list[bytes]]) -> None: """Add or modify graftpoints. Args: @@ -1215,7 +1215,7 @@ def __init__( def _write_reflog( self, ref, old_sha, new_sha, committer, timestamp, timezone, message - ): + ) -> None: from .reflog import format_reflog_line path = os.path.join(self.controldir(), "logs", os.fsdecode(ref)) @@ -1305,7 +1305,7 @@ def _determine_symlinks(self): # TODO(jelmer): Actually probe disk / look at filesystem return sys.platform != "win32" - def _put_named_file(self, path, contents): + def _put_named_file(self, path, contents) -> None: """Write a file to the control dir with the given name and contents. Args: @@ -1316,7 +1316,7 @@ def _put_named_file(self, path, contents): with GitFile(os.path.join(self.controldir(), path), "wb") as f: f.write(contents) - def _del_named_file(self, path): + def _del_named_file(self, path) -> None: try: os.unlink(os.path.join(self.controldir(), path)) except FileNotFoundError: @@ -1362,7 +1362,7 @@ def open_index(self) -> "Index": raise NoIndexPresent return Index(self.index_path()) - def has_index(self): + def has_index(self) -> bool: """Check if an index is present.""" # Bare repos must never have index files; non-bare repos may have a # missing index file, which is treated as empty. 
@@ -1434,7 +1434,7 @@ def stage( index[tree_path] = index_entry_from_stat(st, blob.id) index.write() - def unstage(self, fs_paths: list[str]): + def unstage(self, fs_paths: list[str]) -> None: """Unstage specific file in the index Args: fs_paths: a list of files to unstage, @@ -1613,7 +1613,7 @@ def reset_index(self, tree: Optional[bytes] = None): symlink_fn = symlink else: - def symlink_fn(source, target): # type: ignore + def symlink_fn(source, target) -> None: # type: ignore with open( target, "w" + ("b" if isinstance(source, bytes) else "") ) as f: @@ -1670,7 +1670,7 @@ def get_description(self): def __repr__(self) -> str: return f"" - def set_description(self, description): + def set_description(self, description) -> None: """Set the description for this repository. Args: @@ -1801,7 +1801,7 @@ def init_bare( create = init_bare - def close(self): + def close(self) -> None: """Close any files opened by this repository.""" self.object_store.close() @@ -1846,10 +1846,10 @@ def __init__(self) -> None: self._config = ConfigFile() self._description = None - def _append_reflog(self, *args): + def _append_reflog(self, *args) -> None: self._reflog.append(args) - def set_description(self, description): + def set_description(self, description) -> None: self._description = description def get_description(self): @@ -1869,7 +1869,7 @@ def _determine_symlinks(self): """ return sys.platform != "win32" - def _put_named_file(self, path, contents): + def _put_named_file(self, path, contents) -> None: """Write a file to the control dir with the given name and contents. Args: @@ -1878,7 +1878,7 @@ def _put_named_file(self, path, contents): """ self._named_files[path] = contents - def _del_named_file(self, path): + def _del_named_file(self, path) -> None: try: del self._named_files[path] except KeyError: @@ -1900,7 +1900,7 @@ def get_named_file(self, path, basedir=None): return None return BytesIO(contents) - def open_index(self): + def open_index(self) -> "Index": """Fail to open index for this repo, since it is bare. Raises: diff --git a/dulwich/server.py b/dulwich/server.py index 9f8083fad..e75967acd 100644 --- a/dulwich/server.py +++ b/dulwich/server.py @@ -51,8 +51,7 @@ import zlib from collections.abc import Iterable from functools import partial -from typing import Optional, cast -from typing import Protocol as TypingProtocol +from typing import Optional, cast, Iterator, Protocol as TypingProtocol, Type from dulwich import log_utils @@ -121,7 +120,7 @@ class Backend: """A backend for the Git smart server implementation.""" - def open_repository(self, path): + def open_repository(self, path) -> BackendRepo: """Open the repository at a path. Args: @@ -164,7 +163,7 @@ def get_peeled(self, name: bytes) -> Optional[bytes]: def find_missing_objects( self, determine_wants, graph_walker, progress, get_tagged=None - ): + ) -> Iterator[ObjectID]: """Yield the objects required for a list of commits. 
Args: @@ -181,7 +180,7 @@ class DictBackend(Backend): def __init__(self, repos) -> None: self.repos = repos - def open_repository(self, path: str) -> BaseRepo: + def open_repository(self, path: str) -> BackendRepo: logger.debug("Opening repository at %s", path) try: return self.repos[path] @@ -314,15 +313,15 @@ def required_capabilities(cls): CAPABILITY_OFS_DELTA, ) - def progress(self, message: bytes): + def progress(self, message: bytes) -> None: pass - def _start_pack_send_phase(self): + def _start_pack_send_phase(self) -> None: if self.has_capability(CAPABILITY_SIDE_BAND_64K): # The provided haves are processed, and it is safe to send side- # band data now. if not self.has_capability(CAPABILITY_NO_PROGRESS): - self.progress = partial( + self.progress = partial( # type: ignore self.proto.write_sideband, SIDE_BAND_CHANNEL_PROGRESS ) @@ -364,7 +363,7 @@ def get_tagged(self, refs=None, repo=None) -> dict[ObjectID, ObjectID]: tagged[peeled_sha] = sha return tagged - def handle(self): + def handle(self) -> None: # Note the fact that client is only processing responses related # to the have lines it sent, and any other data (including side- # band) will be be considered a fatal error. @@ -501,7 +500,7 @@ def get_parents(sha): return shallow, not_shallow -def _want_satisfied(store: ObjectContainer, haves, want, earliest): +def _want_satisfied(store: ObjectContainer, haves, want, earliest) -> bool: o = store[want] pending = collections.deque([o]) known = {want} @@ -524,7 +523,7 @@ def _want_satisfied(store: ObjectContainer, haves, want, earliest): return False -def _all_wants_satisfied(store: ObjectContainer, haves, wants): +def _all_wants_satisfied(store: ObjectContainer, haves, wants) -> bool: """Check whether all the current wants are satisfied by a set of haves. Args: @@ -547,6 +546,15 @@ def _all_wants_satisfied(store: ObjectContainer, haves, wants): return True +class AckGraphWalkerImpl: + + def __init__(self, graph_walker): + raise NotImplementedError + + def ack(self, have_ref: ObjectID) -> None: + raise NotImplementedError + + class _ProtocolGraphWalker: """A graph walker that knows the git protocol. @@ -578,7 +586,7 @@ def __init__( self._cached = False self._cache: list[bytes] = [] self._cache_index = 0 - self._impl = None + self._impl: Optional[AckGraphWalkerImpl] = None def determine_wants(self, heads, depth=None): """Determine the wants for a set of heads. @@ -662,12 +670,12 @@ def determine_wants(self, heads, depth=None): return want_revs - def unread_proto_line(self, command, value): + def unread_proto_line(self, command, value) -> None: if isinstance(value, int): value = str(value).encode("ascii") self.proto.unread_pkt_line(command + b" " + value) - def nak(self): + def nak(self) -> None: pass def ack(self, have_ref): @@ -675,7 +683,7 @@ def ack(self, have_ref): raise ValueError(f"invalid sha {have_ref!r}") return self._impl.ack(have_ref) - def reset(self): + def reset(self) -> None: self._cached = True self._cache_index = 0 @@ -703,7 +711,7 @@ def read_proto_line(self, allowed): """ return _split_proto_line(self.proto.read_pkt_line(), allowed) - def _handle_shallow_request(self, wants): + def _handle_shallow_request(self, wants) -> None: while True: command, val = self.read_proto_line((COMMAND_DEEPEN, COMMAND_SHALLOW)) if command == COMMAND_DEEPEN: @@ -727,21 +735,21 @@ def _handle_shallow_request(self, wants): self.proto.write_pkt_line(None) - def notify_done(self): + def notify_done(self) -> None: # relay the message down to the handler. 
self.handler.notify_done() - def send_ack(self, sha, ack_type=b""): + def send_ack(self, sha, ack_type=b"") -> None: self.proto.write_pkt_line(format_ack_line(sha, ack_type)) - def send_nak(self): + def send_nak(self) -> None: self.proto.write_pkt_line(NAK_LINE) def handle_done(self, done_required, done_received): # Delegate this to the implementation. return self._impl.handle_done(done_required, done_received) - def set_wants(self, wants): + def set_wants(self, wants) -> None: self._wants = wants def all_wants_satisfied(self, haves): @@ -754,8 +762,8 @@ def all_wants_satisfied(self, haves): """ return _all_wants_satisfied(self.store, haves, self._wants) - def set_ack_type(self, ack_type): - impl_classes = { + def set_ack_type(self, ack_type) -> None: + impl_classes: dict[int, Type[AckGraphWalkerImpl]] = { MULTI_ACK: MultiAckGraphWalkerImpl, MULTI_ACK_DETAILED: MultiAckDetailedGraphWalkerImpl, SINGLE_ACK: SingleAckGraphWalkerImpl, @@ -766,14 +774,14 @@ def set_ack_type(self, ack_type): _GRAPH_WALKER_COMMANDS = (COMMAND_HAVE, COMMAND_DONE, None) -class SingleAckGraphWalkerImpl: +class SingleAckGraphWalkerImpl(AckGraphWalkerImpl): """Graph walker implementation that speaks the single-ack protocol.""" def __init__(self, walker) -> None: self.walker = walker self._common: list[bytes] = [] - def ack(self, have_ref): + def ack(self, have_ref) -> None: if not self._common: self.walker.send_ack(have_ref) self._common.append(have_ref) @@ -789,7 +797,7 @@ def next(self): __next__ = next - def handle_done(self, done_required, done_received): + def handle_done(self, done_required, done_received) -> bool: if not self._common: self.walker.send_nak() @@ -810,7 +818,7 @@ def handle_done(self, done_required, done_received): return True -class MultiAckGraphWalkerImpl: +class MultiAckGraphWalkerImpl(AckGraphWalkerImpl): """Graph walker implementation that speaks the multi-ack protocol.""" def __init__(self, walker) -> None: @@ -818,7 +826,7 @@ def __init__(self, walker) -> None: self._found_base = False self._common: list[bytes] = [] - def ack(self, have_ref): + def ack(self, have_ref) -> None: self._common.append(have_ref) if not self._found_base: self.walker.send_ack(have_ref, b"continue") @@ -845,7 +853,7 @@ def next(self): __next__ = next - def handle_done(self, done_required, done_received): + def handle_done(self, done_required, done_received) -> bool: if done_required and not done_received: # we are not done, especially when done is required; skip # the pack for this request and especially do not handle @@ -869,14 +877,14 @@ def handle_done(self, done_required, done_received): return True -class MultiAckDetailedGraphWalkerImpl: +class MultiAckDetailedGraphWalkerImpl(AckGraphWalkerImpl): """Graph walker implementation speaking the multi-ack-detailed protocol.""" def __init__(self, walker) -> None: self.walker = walker self._common: list[bytes] = [] - def ack(self, have_ref): + def ack(self, have_ref) -> None: # Should only be called iff have_ref is common self._common.append(have_ref) self.walker.send_ack(have_ref, b"common") @@ -910,7 +918,7 @@ def next(self): __next__ = next - def handle_done(self, done_required, done_received): + def handle_done(self, done_required, done_received) -> bool: if done_required and not done_received: # we are not done, especially when done is required; skip # the pack for this request and especially do not handle @@ -1022,14 +1030,14 @@ def _report_status(self, status: list[tuple[bytes, bytes]]) -> None: ) write = writer.write - def flush(): + def flush() -> None: 
writer.flush() self.proto.write_pkt_line(None) else: write = self.proto.write_pkt_line - def flush(): + def flush() -> None: pass for name, msg in status: @@ -1042,7 +1050,7 @@ def flush(): write(None) flush() - def _on_post_receive(self, client_refs): + def _on_post_receive(self, client_refs) -> None: hook = self.repo.hooks.get("post-receive", None) if not hook: return @@ -1155,7 +1163,7 @@ def __init__(self, handlers, *args, **kwargs) -> None: self.handlers = handlers socketserver.StreamRequestHandler.__init__(self, *args, **kwargs) - def handle(self): + def handle(self) -> None: proto = ReceivableProtocol(self.connection.recv, self.wfile.write) command, args = proto.read_cmd() logger.info("Handling %s request, args=%s", command, args) @@ -1163,7 +1171,7 @@ def handle(self): cls = self.handlers.get(command, None) if not callable(cls): raise GitProtocolError(f"Invalid service {command}") - h = cls(self.server.backend, args, proto) + h = cls(self.server.backend, args, proto) # type: ignore h.handle() @@ -1182,18 +1190,18 @@ def __init__(self, backend, listen_addr, port=TCP_GIT_PORT, handlers=None) -> No logger.info("Listening for TCP connections on %s:%d", listen_addr, port) socketserver.TCPServer.__init__(self, (listen_addr, port), self._make_handler) - def verify_request(self, request, client_address): + def verify_request(self, request, client_address) -> bool: logger.info("Handling request from %s", client_address) return True - def handle_error(self, request, client_address): + def handle_error(self, request, client_address) -> None: logger.exception( "Exception happened during processing of request " "from %s", client_address, ) -def main(argv=sys.argv): +def main(argv=sys.argv) -> None: """Entry point for starting a TCP git server.""" import optparse @@ -1228,7 +1236,7 @@ def main(argv=sys.argv): def serve_command( handler_cls, argv=sys.argv, backend=None, inf=sys.stdin, outf=sys.stdout -): +) -> int: """Serve a single command. This is mostly useful for the implementation of commands used by e.g. @@ -1245,7 +1253,7 @@ def serve_command( if backend is None: backend = FileSystemBackend() - def send_fn(data): + def send_fn(data) -> None: outf.write(data) outf.flush() @@ -1268,7 +1276,7 @@ def generate_objects_info_packs(repo): yield (b"P " + os.fsencode(pack.data.filename) + b"\n") -def update_server_info(repo): +def update_server_info(repo) -> None: """Generate server info for dumb file access. 
This generates info/refs and objects/info/packs, diff --git a/dulwich/stash.py b/dulwich/stash.py index 0b577241a..aeda4bf20 100644 --- a/dulwich/stash.py +++ b/dulwich/stash.py @@ -55,7 +55,7 @@ def from_repo(cls, repo): """Create a new stash from a Repo object.""" return cls(repo) - def drop(self, index): + def drop(self, index) -> None: """Drop entry with specified index.""" with open(self._reflog_path, "rb+") as f: drop_reflog_entry(f, index, rewrite=True) diff --git a/dulwich/tests/test_object_store.py b/dulwich/tests/test_object_store.py index c012706ed..347b1e628 100644 --- a/dulwich/tests/test_object_store.py +++ b/dulwich/tests/test_object_store.py @@ -26,15 +26,22 @@ from dulwich.object_store import ( iter_tree_contents, peel_sha, + PackBasedObjectStore, ) from dulwich.objects import ( Blob, TreeEntry, + Tree, ) from dulwich.protocol import DEPTH_INFINITE from .utils import make_object, make_tag +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from dulwich.object_store import BaseObjectStore + try: from unittest.mock import patch except ImportError: @@ -45,19 +52,36 @@ class ObjectStoreTests: - def test_determine_wants_all(self): + store: "BaseObjectStore" + + def assertEqual(self, a, b) -> None: + raise NotImplementedError + + def assertRaises(self, exc, func) -> None: + raise NotImplementedError + + def assertNotIn(self, a, b) -> None: + raise NotImplementedError + + def assertNotEqual(self, a, b) -> None: + raise NotImplementedError + + def assertIn(self, a, b) -> None: + raise NotImplementedError + + def test_determine_wants_all(self) -> None: self.assertEqual( [b"1" * 40], self.store.determine_wants_all({b"refs/heads/foo": b"1" * 40}), ) - def test_determine_wants_all_zero(self): + def test_determine_wants_all_zero(self) -> None: self.assertEqual( [], self.store.determine_wants_all({b"refs/heads/foo": b"0" * 40}) ) @skipUnless(patch, "Required mock.patch") - def test_determine_wants_all_depth(self): + def test_determine_wants_all_depth(self) -> None: self.store.add_object(testobject) refs = {b"refs/heads/foo": testobject.id} with patch.object(self.store, "_get_depth", return_value=1) as m: @@ -74,7 +98,7 @@ def test_determine_wants_all_depth(self): [testobject.id], self.store.determine_wants_all(refs, depth=2) ) - def test_get_depth(self): + def test_get_depth(self) -> None: self.assertEqual(0, self.store._get_depth(testobject.id)) self.store.add_object(testobject) @@ -92,24 +116,24 @@ def test_get_depth(self): ), ) - def test_iter(self): + def test_iter(self) -> None: self.assertEqual([], list(self.store)) - def test_get_nonexistant(self): + def test_get_nonexistant(self) -> None: self.assertRaises(KeyError, lambda: self.store[b"a" * 40]) - def test_contains_nonexistant(self): + def test_contains_nonexistant(self) -> None: self.assertNotIn(b"a" * 40, self.store) - def test_add_objects_empty(self): + def test_add_objects_empty(self) -> None: self.store.add_objects([]) - def test_add_commit(self): + def test_add_commit(self) -> None: # TODO: Argh, no way to construct Git commit objects without # access to a serialized form. self.store.add_objects([]) - def test_store_resilience(self): + def test_store_resilience(self) -> None: """Test if updating an existing stored object doesn't erase the object from the store. 
""" @@ -123,14 +147,14 @@ def test_store_resilience(self): self.assertNotEqual(test_object.id, stored_test_object.id) self.assertEqual(stored_test_object.id, test_object_id) - def test_add_object(self): + def test_add_object(self) -> None: self.store.add_object(testobject) self.assertEqual({testobject.id}, set(self.store)) self.assertIn(testobject.id, self.store) r = self.store[testobject.id] self.assertEqual(r, testobject) - def test_add_objects(self): + def test_add_objects(self) -> None: data = [(testobject, "mypath")] self.store.add_objects(data) self.assertEqual({testobject.id}, set(self.store)) @@ -138,7 +162,7 @@ def test_add_objects(self): r = self.store[testobject.id] self.assertEqual(r, testobject) - def test_tree_changes(self): + def test_tree_changes(self) -> None: blob_a1 = make_object(Blob, data=b"a1") blob_a2 = make_object(Blob, data=b"a2") blob_b = make_object(Blob, data=b"b") @@ -163,7 +187,7 @@ def test_tree_changes(self): list(self.store.tree_changes(tree1_id, tree2_id, want_unchanged=True)), ) - def test_iter_tree_contents(self): + def test_iter_tree_contents(self) -> None: blob_a = make_object(Blob, data=b"a") blob_b = make_object(Blob, data=b"b") blob_c = make_object(Blob, data=b"c") @@ -184,7 +208,7 @@ def test_iter_tree_contents(self): ) self.assertEqual([], list(iter_tree_contents(self.store, None))) - def test_iter_tree_contents_include_trees(self): + def test_iter_tree_contents_include_trees(self) -> None: blob_a = make_object(Blob, data=b"a") blob_b = make_object(Blob, data=b"b") blob_c = make_object(Blob, data=b"c") @@ -198,7 +222,9 @@ def test_iter_tree_contents_include_trees(self): ] tree_id = commit_tree(self.store, blobs) tree = self.store[tree_id] + assert isinstance(tree, Tree) tree_ad = self.store[tree[b"ad"][1]] + assert isinstance(tree_ad, Tree) tree_bd = self.store[tree_ad[b"bd"][1]] expected = [ @@ -217,7 +243,7 @@ def make_tag(self, name, obj): self.store.add_object(tag) return tag - def test_peel_sha(self): + def test_peel_sha(self) -> None: self.store.add_object(testobject) tag1 = self.make_tag(b"1", testobject) tag2 = self.make_tag(b"2", testobject) @@ -225,18 +251,18 @@ def test_peel_sha(self): for obj in [testobject, tag1, tag2, tag3]: self.assertEqual((obj, testobject), peel_sha(self.store, obj.id)) - def test_get_raw(self): + def test_get_raw(self) -> None: self.store.add_object(testobject) self.assertEqual( (Blob.type_num, b"yummy data"), self.store.get_raw(testobject.id) ) - def test_close(self): + def test_close(self) -> None: # For now, just check that close doesn't barf. 
self.store.add_object(testobject) self.store.close() - def test_iter_prefix(self): + def test_iter_prefix(self) -> None: self.store.add_object(testobject) self.assertEqual([testobject.id], list(self.store.iter_prefix(testobject.id))) self.assertEqual( @@ -247,19 +273,21 @@ def test_iter_prefix(self): ) self.assertEqual([testobject.id], list(self.store.iter_prefix(b""))) - def test_iter_prefix_not_found(self): + def test_iter_prefix_not_found(self) -> None: self.assertEqual([], list(self.store.iter_prefix(b"1" * 40))) class PackBasedObjectStoreTests(ObjectStoreTests): - def tearDown(self): + store: PackBasedObjectStore + + def tearDown(self) -> None: for pack in self.store.packs: pack.close() - def test_empty_packs(self): + def test_empty_packs(self) -> None: self.assertEqual([], list(self.store.packs)) - def test_pack_loose_objects(self): + def test_pack_loose_objects(self) -> None: b1 = make_object(Blob, data=b"yummy data") self.store.add_object(b1) b2 = make_object(Blob, data=b"more yummy data") @@ -273,7 +301,7 @@ def test_pack_loose_objects(self): self.assertNotEqual([], list(self.store.packs)) self.assertEqual(0, self.store.pack_loose_objects()) - def test_repack(self): + def test_repack(self) -> None: b1 = make_object(Blob, data=b"yummy data") self.store.add_object(b1) b2 = make_object(Blob, data=b"more yummy data") @@ -290,7 +318,7 @@ def test_repack(self): self.assertEqual(1, len(self.store.packs)) self.assertEqual(0, self.store.pack_loose_objects()) - def test_repack_existing(self): + def test_repack_existing(self) -> None: b1 = make_object(Blob, data=b"yummy data") self.store.add_object(b1) b2 = make_object(Blob, data=b"more yummy data") diff --git a/dulwich/tests/utils.py b/dulwich/tests/utils.py index 2a1dafd3d..b590577d6 100644 --- a/dulwich/tests/utils.py +++ b/dulwich/tests/utils.py @@ -71,7 +71,7 @@ def open_repo(name, temp_dir=None): return Repo(temp_repo_dir) -def tear_down_repo(repo): +def tear_down_repo(repo) -> None: """Tear down a test repository.""" repo.close() temp_dir = os.path.dirname(repo.path.rstrip(os.sep)) @@ -160,7 +160,7 @@ def make_tag(target, **attrs): def functest_builder(method, func): """Generate a test method that tests the given function.""" - def do_test(self): + def do_test(self) -> None: method(self, func) return do_test @@ -188,7 +188,7 @@ def _do_some_test(self, func_impl): func: The function implementation to pass to method. 
""" - def do_test(self): + def do_test(self) -> None: if not isinstance(func, types.BuiltinFunctionType): raise SkipTest(f"{func} extension not found") method(self, func) @@ -354,12 +354,12 @@ def setup_warning_catcher(): caught_warnings = [] original_showwarning = warnings.showwarning - def custom_showwarning(*args, **kwargs): + def custom_showwarning(*args, **kwargs) -> None: caught_warnings.append(args[0]) warnings.showwarning = custom_showwarning - def restore_showwarning(): + def restore_showwarning() -> None: warnings.showwarning = original_showwarning return caught_warnings, restore_showwarning diff --git a/dulwich/walk.py b/dulwich/walk.py index 79d575cda..9725e598f 100644 --- a/dulwich/walk.py +++ b/dulwich/walk.py @@ -139,7 +139,7 @@ def __init__(self, walker: "Walker") -> None: for commit_id in chain(walker.include, walker.excluded): self._push(commit_id) - def _push(self, object_id: bytes): + def _push(self, object_id: bytes) -> None: try: obj = self._store[object_id] except KeyError as exc: @@ -154,7 +154,7 @@ def _push(self, object_id: bytes): self._pq_set.add(commit.id) self._seen.add(commit.id) - def _exclude_parents(self, commit): + def _exclude_parents(self, commit) -> None: excluded = self._excluded seen = self._seen todo = [commit] @@ -299,7 +299,7 @@ def __init__( self._queue = queue_cls(self) self._out_queue: collections.deque[WalkEntry] = collections.deque() - def _path_matches(self, changed_path): + def _path_matches(self, changed_path) -> bool: if changed_path is None: return False if self.paths is None: @@ -314,7 +314,8 @@ def _path_matches(self, changed_path): return True return False - def _change_matches(self, change): + def _change_matches(self, change) -> bool: + assert self.paths if not change: return False @@ -329,7 +330,7 @@ def _change_matches(self, change): return True return False - def _should_return(self, entry): + def _should_return(self, entry) -> Optional[bool]: """Determine if a walk entry should be returned.. 
Args: diff --git a/dulwich/web.py b/dulwich/web.py index 71cfe7166..1c15f831f 100644 --- a/dulwich/web.py +++ b/dulwich/web.py @@ -338,7 +338,7 @@ def __init__( self._cache_headers: list[tuple[str, str]] = [] self._headers: list[tuple[str, str]] = [] - def add_header(self, name, value): + def add_header(self, name, value) -> None: """Add a header to the response.""" self._headers.append((name, value)) @@ -511,35 +511,35 @@ def make_wsgi_chain(*args, **kwargs): class ServerHandlerLogger(ServerHandler): """ServerHandler that uses dulwich's logger for logging exceptions.""" - def log_exception(self, exc_info): + def log_exception(self, exc_info) -> None: logger.exception( "Exception happened during processing of request", exc_info=exc_info, ) - def log_message(self, format, *args): + def log_message(self, format, *args) -> None: logger.info(format, *args) - def log_error(self, *args): + def log_error(self, *args) -> None: logger.error(*args) class WSGIRequestHandlerLogger(WSGIRequestHandler): """WSGIRequestHandler that uses dulwich's logger for logging exceptions.""" - def log_exception(self, exc_info): + def log_exception(self, exc_info) -> None: logger.exception( "Exception happened during processing of request", exc_info=exc_info, ) - def log_message(self, format, *args): + def log_message(self, format, *args) -> None: logger.info(format, *args) - def log_error(self, *args): + def log_error(self, *args) -> None: logger.error(*args) - def handle(self): + def handle(self) -> None: """Handle a single HTTP request.""" self.raw_requestline = self.rfile.readline() if not self.parse_request(): # An error code has been sent, just exit @@ -548,19 +548,19 @@ def handle(self): handler = ServerHandlerLogger( self.rfile, self.wfile, self.get_stderr(), self.get_environ() ) - handler.request_handler = self # backpointer for logging - handler.run(self.server.get_app()) + handler.request_handler = self # type: ignore # backpointer for logging + handler.run(self.server.get_app()) # type: ignore class WSGIServerLogger(WSGIServer): - def handle_error(self, request, client_address): + def handle_error(self, request, client_address) -> None: """Handle an error.""" logger.exception( f"Exception happened during processing of request from {client_address!s}" ) -def main(argv=sys.argv): +def main(argv=sys.argv) -> None: """Entry point for starting an HTTP git server.""" import optparse diff --git a/fuzzing/fuzz-targets/fuzz_bundle.py b/fuzzing/fuzz-targets/fuzz_bundle.py index 069511e69..fa0dcd8bf 100644 --- a/fuzzing/fuzz-targets/fuzz_bundle.py +++ b/fuzzing/fuzz-targets/fuzz_bundle.py @@ -1,5 +1,6 @@ import sys from io import BytesIO +from typing import Optional import atheris @@ -11,7 +12,7 @@ from dulwich.pack import PackData, write_pack_objects -def TestOneInput(data): +def TestOneInput(data) -> Optional[int]: fdp = EnhancedFuzzedDataProvider(data) bundle = Bundle() bundle.version = fdp.PickValueInList([2, 3, None]) @@ -45,7 +46,7 @@ def TestOneInput(data): raise e -def main(): +def main() -> None: atheris.Setup(sys.argv, TestOneInput) atheris.Fuzz() diff --git a/fuzzing/fuzz-targets/fuzz_configfile.py b/fuzzing/fuzz-targets/fuzz_configfile.py index 8830e8e2b..1460aa57c 100644 --- a/fuzzing/fuzz-targets/fuzz_configfile.py +++ b/fuzzing/fuzz-targets/fuzz_configfile.py @@ -1,5 +1,6 @@ import sys from io import BytesIO +from typing import Optional import atheris from test_utils import is_expected_exception @@ -8,7 +9,7 @@ from dulwich.config import ConfigFile -def TestOneInput(data): +def TestOneInput(data) -> 
Optional[int]: try: ConfigFile.from_file(BytesIO(data)) except ValueError as e: @@ -27,7 +28,7 @@ def TestOneInput(data): raise e -def main(): +def main() -> None: atheris.Setup(sys.argv, TestOneInput) atheris.Fuzz() diff --git a/fuzzing/fuzz-targets/fuzz_object_store.py b/fuzzing/fuzz-targets/fuzz_object_store.py index 797939be0..8fcbe8907 100644 --- a/fuzzing/fuzz-targets/fuzz_object_store.py +++ b/fuzzing/fuzz-targets/fuzz_object_store.py @@ -1,6 +1,7 @@ import stat import sys from io import BytesIO +from typing import Optional import atheris @@ -17,7 +18,7 @@ ) -def TestOneInput(data): +def TestOneInput(data) -> Optional[int]: fdp = EnhancedFuzzedDataProvider(data) repo = MemoryRepo() blob = Blob.from_string(fdp.ConsumeRandomBytes()) @@ -85,7 +86,7 @@ def TestOneInput(data): raise e -def main(): +def main() -> None: atheris.Setup(sys.argv, TestOneInput) atheris.Fuzz() diff --git a/fuzzing/fuzz-targets/fuzz_repo.py b/fuzzing/fuzz-targets/fuzz_repo.py index 61db56486..4c0a52ea4 100644 --- a/fuzzing/fuzz-targets/fuzz_repo.py +++ b/fuzzing/fuzz-targets/fuzz_repo.py @@ -1,6 +1,7 @@ import os import sys import tempfile +from typing import Optional import atheris @@ -14,7 +15,7 @@ ) -def TestOneInput(data): +def TestOneInput(data) -> Optional[int]: fdp = EnhancedFuzzedDataProvider(data) with tempfile.TemporaryDirectory() as temp_dir: repo = Repo.init(temp_dir) @@ -54,7 +55,7 @@ def TestOneInput(data): raise e -def main(): +def main() -> None: atheris.Setup(sys.argv, TestOneInput) atheris.Fuzz() diff --git a/fuzzing/fuzz-targets/test_utils.py b/fuzzing/fuzz-targets/test_utils.py index 2ae92ecf3..841450fde 100644 --- a/fuzzing/fuzz-targets/test_utils.py +++ b/fuzzing/fuzz-targets/test_utils.py @@ -4,7 +4,7 @@ @atheris.instrument_func def is_expected_exception( error_message_list: list[str], exception: Exception -): # pragma: no cover +) -> bool: # pragma: no cover """Checks if the message of a given exception matches any of the expected error messages. Args: @@ -23,7 +23,7 @@ def is_expected_exception( class EnhancedFuzzedDataProvider(atheris.FuzzedDataProvider): # pragma: no cover """Extends atheris.FuzzedDataProvider to offer additional methods to make fuzz testing slightly more DRY.""" - def __init__(self, data): + def __init__(self, data) -> None: """Initializes the EnhancedFuzzedDataProvider with fuzzing data from the argument provided to TestOneInput. 
Args: diff --git a/tests/__init__.py b/tests/__init__.py index b8998c614..d4bd669bb 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -43,13 +43,13 @@ class TestCase(_TestCase): - def setUp(self): + def setUp(self) -> None: super().setUp() self.overrideEnv("HOME", "/nonexistent") self.overrideEnv("GIT_CONFIG_NOSYSTEM", "1") - def overrideEnv(self, name, value): - def restore(): + def overrideEnv(self, name, value) -> None: + def restore() -> None: if oldval is not None: os.environ[name] = oldval else: @@ -168,7 +168,7 @@ def tutorial_test_suite(): to_restore = [] - def overrideEnv(name, value): + def overrideEnv(name, value) -> None: oldval = os.environ.get(name) if value is not None: os.environ[name] = value @@ -176,7 +176,7 @@ def overrideEnv(name, value): del os.environ[name] to_restore.append((name, oldval)) - def setup(test): + def setup(test) -> None: test.__old_cwd = os.getcwd() test.tempdir = tempfile.mkdtemp() test.globs.update({"tempdir": test.tempdir}) @@ -184,7 +184,7 @@ def setup(test): overrideEnv("HOME", "/nonexistent") overrideEnv("GIT_CONFIG_NOSYSTEM", "1") - def teardown(test): + def teardown(test) -> None: os.chdir(test.__old_cwd) shutil.rmtree(test.tempdir) for name, oldval in to_restore: diff --git a/tests/compat/server_utils.py b/tests/compat/server_utils.py index ec82a88b2..40ecb1c2f 100644 --- a/tests/compat/server_utils.py +++ b/tests/compat/server_utils.py @@ -43,7 +43,7 @@ def __init__(self, name) -> None: self.path = os.path.join(temp_dir, name) os.mkdir(self.path) - def close(self): + def close(self) -> None: pass @@ -74,11 +74,11 @@ class ServerTests: 10, ) - def import_repos(self): + def import_repos(self) -> None: self._old_repo = self.import_repo("server_old.export") self._new_repo = self.import_repo("server_new.export") - def url(self, port): + def url(self, port) -> str: return f"{self.protocol}://localhost:{port}/" def branch_args(self, branches=None): @@ -86,7 +86,7 @@ def branch_args(self, branches=None): branches = ["master", "branch"] return [f"{b}:{b}" for b in branches] - def test_push_to_dulwich(self): + def test_push_to_dulwich(self) -> None: self.import_repos() self.assertReposNotEqual(self._old_repo, self._new_repo) port = self._start_server(self._old_repo) @@ -97,7 +97,7 @@ def test_push_to_dulwich(self): ) self.assertReposEqual(self._old_repo, self._new_repo) - def test_push_to_dulwich_no_op(self): + def test_push_to_dulwich_no_op(self) -> None: self._old_repo = self.import_repo("server_old.export") self._new_repo = self.import_repo("server_old.export") self.assertReposEqual(self._old_repo, self._new_repo) @@ -109,7 +109,7 @@ def test_push_to_dulwich_no_op(self): ) self.assertReposEqual(self._old_repo, self._new_repo) - def test_push_to_dulwich_remove_branch(self): + def test_push_to_dulwich_remove_branch(self) -> None: self._old_repo = self.import_repo("server_old.export") self._new_repo = self.import_repo("server_old.export") self.assertReposEqual(self._old_repo, self._new_repo) @@ -119,7 +119,7 @@ def test_push_to_dulwich_remove_branch(self): self.assertEqual(list(self._old_repo.get_refs().keys()), [b"refs/heads/branch"]) - def test_fetch_from_dulwich(self): + def test_fetch_from_dulwich(self) -> None: self.import_repos() self.assertReposNotEqual(self._old_repo, self._new_repo) port = self._start_server(self._new_repo) @@ -132,7 +132,7 @@ def test_fetch_from_dulwich(self): self._old_repo.object_store._pack_cache_time = 0 self.assertReposEqual(self._old_repo, self._new_repo) - def test_fetch_from_dulwich_no_op(self): + def 
test_fetch_from_dulwich_no_op(self) -> None: self._old_repo = self.import_repo("server_old.export") self._new_repo = self.import_repo("server_old.export") self.assertReposEqual(self._old_repo, self._new_repo) @@ -146,7 +146,7 @@ def test_fetch_from_dulwich_no_op(self): self._old_repo.object_store._pack_cache_time = 0 self.assertReposEqual(self._old_repo, self._new_repo) - def test_clone_from_dulwich_empty(self): + def test_clone_from_dulwich_empty(self) -> None: old_repo_dir = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, old_repo_dir) self._old_repo = Repo.init_bare(old_repo_dir) @@ -159,13 +159,13 @@ def test_clone_from_dulwich_empty(self): new_repo = Repo(new_repo_dir) self.assertReposEqual(self._old_repo, new_repo) - def test_lsremote_from_dulwich(self): + def test_lsremote_from_dulwich(self) -> None: self._repo = self.import_repo("server_old.export") port = self._start_server(self._repo) o = run_git_or_fail(["ls-remote", self.url(port)]) self.assertEqual(len(o.split(b"\n")), 4) - def test_new_shallow_clone_from_dulwich(self): + def test_new_shallow_clone_from_dulwich(self) -> None: require_git_version(self.min_single_branch_version) self._source_repo = self.import_repo("server_new.export") self._stub_repo = _StubRepo("shallow") @@ -191,7 +191,7 @@ def test_new_shallow_clone_from_dulwich(self): self.assertEqual(expected_shallow, _get_shallow(clone)) self.assertReposNotEqual(clone, self._source_repo) - def test_shallow_clone_from_git_is_identical(self): + def test_shallow_clone_from_git_is_identical(self) -> None: require_git_version(self.min_single_branch_version) self._source_repo = self.import_repo("server_new.export") self._stub_repo_git = _StubRepo("shallow-git") @@ -228,7 +228,7 @@ def test_shallow_clone_from_git_is_identical(self): Repo(self._stub_repo_git.path), Repo(self._stub_repo_dw.path) ) - def test_fetch_same_depth_into_shallow_clone_from_dulwich(self): + def test_fetch_same_depth_into_shallow_clone_from_dulwich(self) -> None: require_git_version(self.min_single_branch_version) self._source_repo = self.import_repo("server_new.export") self._stub_repo = _StubRepo("shallow") @@ -260,7 +260,7 @@ def test_fetch_same_depth_into_shallow_clone_from_dulwich(self): self.assertEqual(expected_shallow, _get_shallow(clone)) self.assertReposNotEqual(clone, self._source_repo) - def test_fetch_full_depth_into_shallow_clone_from_dulwich(self): + def test_fetch_full_depth_into_shallow_clone_from_dulwich(self) -> None: require_git_version(self.min_single_branch_version) self._source_repo = self.import_repo("server_new.export") self._stub_repo = _StubRepo("shallow") @@ -294,7 +294,7 @@ def test_fetch_full_depth_into_shallow_clone_from_dulwich(self): self.assertEqual([], _get_shallow(clone)) self.assertReposEqual(clone, self._source_repo) - def test_fetch_from_dulwich_issue_88_standard(self): + def test_fetch_from_dulwich_issue_88_standard(self) -> None: # Basically an integration test to see that the ACK/NAK # generation works on repos with common head. 
self._source_repo = self.import_repo("issue88_expect_ack_nak_server.export") @@ -306,7 +306,7 @@ def test_fetch_from_dulwich_issue_88_standard(self): self._source_repo.object_store, self._client_repo.object_store ) - def test_fetch_from_dulwich_issue_88_alternative(self): + def test_fetch_from_dulwich_issue_88_alternative(self) -> None: # likewise, but the case where the two repos have no common parent self._source_repo = self.import_repo("issue88_expect_ack_nak_other.export") self._client_repo = self.import_repo("issue88_expect_ack_nak_client.export") @@ -325,7 +325,7 @@ def test_fetch_from_dulwich_issue_88_alternative(self): ).type_name, ) - def test_push_to_dulwich_issue_88_standard(self): + def test_push_to_dulwich_issue_88_standard(self) -> None: # Same thing, but we reverse the role of the server/client # and do a push instead. self._source_repo = self.import_repo("issue88_expect_ack_nak_client.export") diff --git a/tests/compat/test_client.py b/tests/compat/test_client.py index 97d36dc7f..788a4232c 100644 --- a/tests/compat/test_client.py +++ b/tests/compat/test_client.py @@ -33,6 +33,7 @@ import threading from contextlib import suppress from io import BytesIO +from typing import NoReturn from unittest.mock import patch from urllib.parse import unquote @@ -56,7 +57,7 @@ class DulwichClientTestBase: """Tests for client/server compatibility.""" - def setUp(self): + def setUp(self) -> None: self.gitroot = os.path.dirname( import_repo_to_dir("server_new.export").rstrip(os.sep) ) @@ -64,23 +65,23 @@ def setUp(self): file.ensure_dir_exists(self.dest) run_git_or_fail(["init", "--quiet", "--bare"], cwd=self.dest) - def tearDown(self): + def tearDown(self) -> None: rmtree_ro(self.gitroot) - def assertDestEqualsSrc(self): + def assertDestEqualsSrc(self) -> None: repo_dir = os.path.join(self.gitroot, "server_new.export") dest_repo_dir = os.path.join(self.gitroot, "dest") with repo.Repo(repo_dir) as src: with repo.Repo(dest_repo_dir) as dest: self.assertReposEqual(src, dest) - def _client(self): + def _client(self) -> NoReturn: raise NotImplementedError - def _build_path(self): + def _build_path(self) -> NoReturn: raise NotImplementedError - def _do_send_pack(self): + def _do_send_pack(self) -> None: c = self._client() srcpath = os.path.join(self.gitroot, "server_new.export") with repo.Repo(srcpath) as src: @@ -92,11 +93,11 @@ def _do_send_pack(self): src.generate_pack_data, ) - def test_send_pack(self): + def test_send_pack(self) -> None: self._do_send_pack() self.assertDestEqualsSrc() - def test_send_pack_nothing_to_send(self): + def test_send_pack_nothing_to_send(self) -> None: self._do_send_pack() self.assertDestEqualsSrc() # nothing to send, but shouldn't raise either. 
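As a rough sketch of the return-annotation convention these hunks apply (hypothetical helper names, not dulwich API): `-> None` marks a function that returns normally without producing a value, `-> NoReturn` one that can never return because it always raises, and `-> Optional[int]` one that may return an int or fall through to None.

from typing import NoReturn, Optional

def close_connection() -> None:
    # Returns normally, produces no value.
    pass

def unimplemented_client() -> NoReturn:
    # Never returns normally; it always raises.
    raise NotImplementedError

def fuzz_one_input(data: bytes) -> Optional[int]:
    # May return an int sentinel (-1) or implicitly return None.
    if not data:
        return -1
    return None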
@@ -112,7 +113,7 @@ def _add_file(repo, tree_id, filename, contents): repo.object_store.add_object(tree) return tree.id - def test_send_pack_from_shallow_clone(self): + def test_send_pack_from_shallow_clone(self) -> None: c = self._client() server_new_path = os.path.join(self.gitroot, "server_new.export") run_git_or_fail(["config", "http.uploadpack", "true"], cwd=server_new_path) @@ -139,7 +140,7 @@ def test_send_pack_from_shallow_clone(self): with repo.Repo(server_new_path) as remote: self.assertEqual(remote.head(), commit_id) - def test_send_without_report_status(self): + def test_send_without_report_status(self) -> None: c = self._client() c._send_capabilities.remove(b"report-status") srcpath = os.path.join(self.gitroot, "server_new.export") @@ -180,7 +181,7 @@ def compute_send(self, src): del sendrefs[b"HEAD"] return sendrefs, src.generate_pack_data - def test_send_pack_one_error(self): + def test_send_pack_one_error(self) -> None: dest, dummy_commit = self.disable_ff_and_make_dummy_commit() dest.refs[b"refs/heads/master"] = dummy_commit repo_dir = os.path.join(self.gitroot, "server_new.export") @@ -198,7 +199,7 @@ def test_send_pack_one_error(self): result.ref_status, ) - def test_send_pack_multiple_errors(self): + def test_send_pack_multiple_errors(self) -> None: dest, dummy = self.disable_ff_and_make_dummy_commit() # set up for two non-ff errors branch, master = b"refs/heads/branch", b"refs/heads/master" @@ -215,7 +216,7 @@ def test_send_pack_multiple_errors(self): result.ref_status, ) - def test_archive(self): + def test_archive(self) -> None: c = self._client() f = BytesIO() c.archive(self._build_path("/server_new.export"), b"HEAD", f.write) @@ -223,7 +224,7 @@ def test_archive(self): tf = tarfile.open(fileobj=f) self.assertEqual(["baz", "foo"], tf.getnames()) - def test_fetch_pack(self): + def test_fetch_pack(self) -> None: c = self._client() with repo.Repo(os.path.join(self.gitroot, "dest")) as dest: result = c.fetch(self._build_path("/server_new.export"), dest) @@ -235,7 +236,7 @@ def test_fetch_pack(self): dest.refs.set_if_equals(r[0], None, r[1]) self.assertDestEqualsSrc() - def test_fetch_pack_with_nondefault_symref(self): + def test_fetch_pack_with_nondefault_symref(self) -> None: c = self._client() src = repo.Repo(os.path.join(self.gitroot, "server_new.export")) src.refs.add_if_new(b"refs/heads/main", src.refs[b"refs/heads/master"]) @@ -250,7 +251,7 @@ def test_fetch_pack_with_nondefault_symref(self): dest.refs.set_if_equals(r[0], None, r[1]) self.assertDestEqualsSrc() - def test_get_refs_with_peeled_tag(self): + def test_get_refs_with_peeled_tag(self) -> None: tag_create( os.path.join(self.gitroot, "server_new.export"), b"v1.0", @@ -270,7 +271,7 @@ def test_get_refs_with_peeled_tag(self): sorted(refs.keys()), ) - def test_get_refs_with_ref_prefix(self): + def test_get_refs_with_ref_prefix(self) -> None: c = self._client() refs = c.get_refs( self._build_path("/server_new.export"), ref_prefix=[b"refs/heads"] @@ -283,7 +284,7 @@ def test_get_refs_with_ref_prefix(self): sorted(refs.keys()), ) - def test_fetch_pack_depth(self): + def test_fetch_pack_depth(self) -> None: c = self._client() with repo.Repo(os.path.join(self.gitroot, "dest")) as dest: result = c.fetch(self._build_path("/server_new.export"), dest, depth=1) @@ -297,7 +298,7 @@ def test_fetch_pack_depth(self): }, ) - def test_repeat(self): + def test_repeat(self) -> None: c = self._client() with repo.Repo(os.path.join(self.gitroot, "dest")) as dest: result = c.fetch(self._build_path("/server_new.export"), dest) @@ 
-309,7 +310,7 @@ def test_repeat(self): dest.refs.set_if_equals(r[0], None, r[1]) self.assertDestEqualsSrc() - def test_fetch_empty_pack(self): + def test_fetch_empty_pack(self) -> None: c = self._client() with repo.Repo(os.path.join(self.gitroot, "dest")) as dest: result = c.fetch(self._build_path("/server_new.export"), dest) @@ -329,7 +330,7 @@ def dw(refs, **kwargs): dest.refs.set_if_equals(r[0], None, r[1]) self.assertDestEqualsSrc() - def test_incremental_fetch_pack(self): + def test_incremental_fetch_pack(self) -> None: self.test_fetch_pack() dest, dummy = self.disable_ff_and_make_dummy_commit() dest.refs[b"refs/heads/master"] = dummy @@ -341,7 +342,7 @@ def test_incremental_fetch_pack(self): dest.refs.set_if_equals(r[0], None, r[1]) self.assertDestEqualsSrc() - def test_fetch_pack_no_side_band_64k(self): + def test_fetch_pack_no_side_band_64k(self) -> None: if protocol.DEFAULT_GIT_PROTOCOL_VERSION_FETCH >= 2: raise SkipTest("side-band-64k cannot be disabled with git protocol v2") c = self._client() @@ -352,7 +353,7 @@ def test_fetch_pack_no_side_band_64k(self): dest.refs.set_if_equals(r[0], None, r[1]) self.assertDestEqualsSrc() - def test_fetch_pack_zero_sha(self): + def test_fetch_pack_zero_sha(self) -> None: # zero sha1s are already present on the client, and should # be ignored c = self._client() @@ -365,7 +366,7 @@ def test_fetch_pack_zero_sha(self): for r in result.refs.items(): dest.refs.set_if_equals(r[0], None, r[1]) - def test_send_remove_branch(self): + def test_send_remove_branch(self) -> None: with repo.Repo(os.path.join(self.gitroot, "dest")) as dest: dummy_commit = self.make_dummy_commit(dest) dest.refs[b"refs/heads/master"] = dummy_commit @@ -382,7 +383,7 @@ def gen_pack(have, want, ofs_delta=False, progress=None): c.send_pack(self._build_path("/dest"), lambda _: sendrefs, gen_pack) self.assertNotIn(b"refs/heads/abranch", dest.refs) - def test_send_new_branch_empty_pack(self): + def test_send_new_branch_empty_pack(self) -> None: with repo.Repo(os.path.join(self.gitroot, "dest")) as dest: dummy_commit = self.make_dummy_commit(dest) dest.refs[b"refs/heads/master"] = dummy_commit @@ -397,7 +398,7 @@ def gen_pack(have, want, ofs_delta=False, progress=None): c.send_pack(self._build_path("/dest"), lambda _: sendrefs, gen_pack) self.assertEqual(dummy_commit, dest.refs[b"refs/heads/abranch"]) - def test_get_refs(self): + def test_get_refs(self) -> None: c = self._client() refs = c.get_refs(self._build_path("/server_new.export")) @@ -407,7 +408,7 @@ def test_get_refs(self): class DulwichTCPClientTest(CompatTestCase, DulwichClientTestBase): - def setUp(self): + def setUp(self) -> None: CompatTestCase.setUp(self) DulwichClientTestBase.setUp(self) if check_for_daemon(limit=1): @@ -440,7 +441,7 @@ def setUp(self): if not check_for_daemon(): raise SkipTest("git-daemon failed to start") - def tearDown(self): + def tearDown(self) -> None: with open(self.pidfile) as f: pid = int(f.read().strip()) if sys.platform == "win32": @@ -467,10 +468,10 @@ def _build_path(self, path): if sys.platform == "win32" and protocol.DEFAULT_GIT_PROTOCOL_VERSION_FETCH < 2: @expectedFailure - def test_fetch_pack_no_side_band_64k(self): + def test_fetch_pack_no_side_band_64k(self) -> None: DulwichClientTestBase.test_fetch_pack_no_side_band_64k(self) - def test_send_remove_branch(self): + def test_send_remove_branch(self) -> None: # This test fails intermittently on my machine, probably due to some sort # of race condition. 
Probably also related to #1015 self.skipTest("skip flaky test; see #1015") @@ -513,13 +514,13 @@ def run_command( class DulwichMockSSHClientTest(CompatTestCase, DulwichClientTestBase): - def setUp(self): + def setUp(self) -> None: CompatTestCase.setUp(self) DulwichClientTestBase.setUp(self) self.real_vendor = client.get_ssh_vendor client.get_ssh_vendor = TestSSHVendor - def tearDown(self): + def tearDown(self) -> None: DulwichClientTestBase.tearDown(self) CompatTestCase.tearDown(self) client.get_ssh_vendor = self.real_vendor @@ -537,11 +538,11 @@ class DulwichMockSSHClientTestGitProtov0(DulwichMockSSHClientTest): class DulwichSubprocessClientTest(CompatTestCase, DulwichClientTestBase): - def setUp(self): + def setUp(self) -> None: CompatTestCase.setUp(self) DulwichClientTestBase.setUp(self) - def tearDown(self): + def tearDown(self) -> None: DulwichClientTestBase.tearDown(self) CompatTestCase.tearDown(self) @@ -564,20 +565,20 @@ class GitHTTPRequestHandler(http.server.SimpleHTTPRequestHandler): # the rest to a subprocess, so we can't use buffered input. rbufsize = 0 - def do_POST(self): + def do_POST(self) -> None: self.run_backend() - def do_GET(self): + def do_GET(self) -> None: self.run_backend() def send_head(self): return self.run_backend() - def log_request(self, code="-", size="-"): + def log_request(self, code="-", size="-") -> None: # Let's be quiet, the test suite is noisy enough already pass - def run_backend(self): + def run_backend(self) -> None: """Call out to git http-backend.""" # Based on CGIHTTPServer.CGIHTTPRequestHandler.run_cgi: # Copyright (c) 2001-2010 Python Software Foundation; @@ -713,14 +714,14 @@ def __init__(self, server_address, root_path) -> None: self.root_path = root_path self.server_name = "localhost" - def get_url(self): + def get_url(self) -> str: return f"http://{self.server_name}:{self.server_port}/" class DulwichHttpClientTest(CompatTestCase, DulwichClientTestBase): min_git_version = (1, 7, 0, 2) - def setUp(self): + def setUp(self) -> None: CompatTestCase.setUp(self) DulwichClientTestBase.setUp(self) self._httpd = HTTPGitServer(("localhost", 0), self.gitroot) @@ -729,7 +730,7 @@ def setUp(self): run_git_or_fail(["config", "http.uploadpack", "true"], cwd=self.dest) run_git_or_fail(["config", "http.receivepack", "true"], cwd=self.dest) - def tearDown(self): + def tearDown(self) -> None: DulwichClientTestBase.tearDown(self) CompatTestCase.tearDown(self) self._httpd.shutdown() @@ -741,7 +742,7 @@ def _client(self): def _build_path(self, path): return path - def test_archive(self): + def test_archive(self) -> NoReturn: raise SkipTest("exporting archives not supported over http") diff --git a/tests/compat/test_pack.py b/tests/compat/test_pack.py index d8b554d8a..7cf25add7 100644 --- a/tests/compat/test_pack.py +++ b/tests/compat/test_pack.py @@ -25,6 +25,7 @@ import re import shutil import tempfile +from typing import NoReturn from dulwich.objects import Blob from dulwich.pack import write_pack @@ -51,13 +52,13 @@ def _git_verify_pack_object_list(output): class TestPack(PackTests): """Compatibility tests for reading and writing pack files.""" - def setUp(self): + def setUp(self) -> None: require_git_version((1, 5, 0)) super().setUp() self._tempdir = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, self._tempdir) - def test_copy(self): + def test_copy(self) -> None: with self.get_pack(pack1_sha) as origpack: self.assertSucceeds(origpack.index.check) pack_path = os.path.join(self._tempdir, "Elch") @@ -66,7 +67,7 @@ def test_copy(self): orig_shas = {o.id for 
o in origpack.iterobjects()} self.assertEqual(orig_shas, _git_verify_pack_object_list(output)) - def test_deltas_work(self): + def test_deltas_work(self) -> None: with self.get_pack(pack1_sha) as orig_pack: orig_blob = orig_pack[a_sha] new_blob = Blob() @@ -91,7 +92,7 @@ def test_deltas_work(self): "Expected 3 non-delta objects, got %d" % got_non_delta, ) - def test_delta_medium_object(self): + def test_delta_medium_object(self) -> None: # This tests an object set that will have a copy operation # 2**20 in size. with self.get_pack(pack1_sha) as orig_pack: @@ -130,7 +131,7 @@ def test_delta_medium_object(self): # on the input size. It's impractical to produce deltas for # objects this large, but it's still worth doing the right thing # when it happens. - def test_delta_large_object(self): + def test_delta_large_object(self) -> NoReturn: # This tests an object set that will have a copy operation # 2**25 in size. This is a copy large enough that it requires # two copy operations in git's binary delta format. diff --git a/tests/compat/test_patch.py b/tests/compat/test_patch.py index 31e155642..77e9f58a5 100644 --- a/tests/compat/test_patch.py +++ b/tests/compat/test_patch.py @@ -32,7 +32,7 @@ class CompatPatchTestCase(CompatTestCase): - def setUp(self): + def setUp(self) -> None: super().setUp() self.test_dir = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, self.test_dir) @@ -40,7 +40,7 @@ def setUp(self): self.repo = Repo.init(self.repo_path, mkdir=True) self.addCleanup(self.repo.close) - def test_patch_apply(self): + def test_patch_apply(self) -> None: # Prepare the repository # Create some files and commit them diff --git a/tests/compat/test_porcelain.py b/tests/compat/test_porcelain.py index 003b9c43a..a357642f7 100644 --- a/tests/compat/test_porcelain.py +++ b/tests/compat/test_porcelain.py @@ -40,7 +40,7 @@ "gpgme not available, skipping tests that require GPG signing", ) class TagCreateSignTestCase(PorcelainGpgTestCase, CompatTestCase): - def test_sign(self): + def test_sign(self) -> None: # Test that dulwich signatures can be verified by CGit c1, c2, c3 = build_commit_graph( self.repo.object_store, [[1], [2, 1], [3, 1, 2]] @@ -64,7 +64,7 @@ def test_sign(self): env={"GNUPGHOME": os.environ["GNUPGHOME"]}, ) - def test_verify(self): + def test_verify(self) -> None: # Test that CGit signatures can be verified by dulwich c1, c2, c3 = build_commit_graph( self.repo.object_store, [[1], [2, 1], [3, 1, 2]] diff --git a/tests/compat/test_repository.py b/tests/compat/test_repository.py index a63bac7ad..a7d5238dd 100644 --- a/tests/compat/test_repository.py +++ b/tests/compat/test_repository.py @@ -34,7 +34,7 @@ class ObjectStoreTestCase(CompatTestCase): """Tests for git repository compatibility.""" - def setUp(self): + def setUp(self) -> None: super().setUp() self._repo = self.import_repo("server_new.export") @@ -55,17 +55,17 @@ def _parse_refs(self, output): def _parse_objects(self, output): return {s.rstrip(b"\n").split(b" ")[0] for s in BytesIO(output)} - def test_bare(self): + def test_bare(self) -> None: self.assertTrue(self._repo.bare) self.assertFalse(os.path.exists(os.path.join(self._repo.path, ".git"))) - def test_head(self): + def test_head(self) -> None: output = self._run_git(["rev-parse", "HEAD"]) head_sha = output.rstrip(b"\n") hex_to_sha(head_sha) self.assertEqual(head_sha, self._repo.refs[b"HEAD"]) - def test_refs(self): + def test_refs(self) -> None: output = self._run_git( ["for-each-ref", "--format=%(refname) %(objecttype) %(objectname)"] ) @@ -90,7 +90,7 @@ def 
_get_all_shas(self): output = self._run_git(["rev-list", "--all", "--objects"]) return self._parse_objects(output) - def assertShasMatch(self, expected_shas, actual_shas_iter): + def assertShasMatch(self, expected_shas, actual_shas_iter) -> None: actual_shas = set() for sha in actual_shas_iter: obj = self._repo[sha] @@ -98,7 +98,7 @@ def assertShasMatch(self, expected_shas, actual_shas_iter): actual_shas.add(sha) self.assertEqual(expected_shas, actual_shas) - def test_loose_objects(self): + def test_loose_objects(self) -> None: # TODO(dborowitz): This is currently not very useful since # fast-imported repos only contained packed objects. expected_shas = self._get_loose_shas() @@ -106,13 +106,13 @@ def test_loose_objects(self): expected_shas, self._repo.object_store._iter_loose_objects() ) - def test_packed_objects(self): + def test_packed_objects(self) -> None: expected_shas = self._get_all_shas() - self._get_loose_shas() self.assertShasMatch( expected_shas, chain.from_iterable(self._repo.object_store.packs) ) - def test_all_objects(self): + def test_all_objects(self) -> None: expected_shas = self._get_all_shas() self.assertShasMatch(expected_shas, iter(self._repo.object_store)) @@ -136,7 +136,7 @@ def create_new_worktree(self, repo_dir, branch): self.addCleanup(rmtree_ro, temp_dir) return temp_dir - def setUp(self): + def setUp(self) -> None: super().setUp() self._worktree_path = self.create_new_worktree(self._repo.path, "branch") self._worktree_repo = Repo(self._worktree_path) @@ -145,18 +145,18 @@ def setUp(self): self._number_of_working_tree = 2 self._repo = self._worktree_repo - def test_refs(self): + def test_refs(self) -> None: super().test_refs() self.assertEqual( self._mainworktree_repo.refs.allkeys(), self._repo.refs.allkeys() ) - def test_head_equality(self): + def test_head_equality(self) -> None: self.assertNotEqual( self._repo.refs[b"HEAD"], self._mainworktree_repo.refs[b"HEAD"] ) - def test_bare(self): + def test_bare(self) -> None: self.assertFalse(self._repo.bare) self.assertTrue(os.path.isfile(os.path.join(self._repo.path, ".git"))) @@ -167,7 +167,7 @@ def _parse_worktree_list(self, output): worktrees.append(tuple(f.decode() for f in fields)) return worktrees - def test_git_worktree_list(self): + def test_git_worktree_list(self) -> None: # 'git worktree list' was introduced in 2.7.0 require_git_version((2, 7, 0)) output = run_git_or_fail(["worktree", "list"], cwd=self._repo.path) @@ -182,7 +182,7 @@ def test_git_worktree_list(self): self.assertEqual(worktrees[0][1], "(bare)") self.assertTrue(os.path.samefile(worktrees[0][0], self._mainworktree_repo.path)) - def test_git_worktree_config(self): + def test_git_worktree_config(self) -> None: """Test that git worktree config parsing matches the git CLI's behavior.""" # Set some config value in the main repo using the git CLI require_git_version((2, 7, 0)) @@ -222,7 +222,7 @@ class InitNewWorkingDirectoryTestCase(WorkingTreeTestCase): min_git_version = (2, 5, 0) - def setUp(self): + def setUp(self) -> None: super().setUp() self._other_worktree = self._repo worktree_repo_path = tempfile.mkdtemp() @@ -233,11 +233,11 @@ def setUp(self): self.addCleanup(self._repo.close) self._number_of_working_tree = 3 - def test_head_equality(self): + def test_head_equality(self) -> None: self.assertEqual( self._repo.refs[b"HEAD"], self._mainworktree_repo.refs[b"HEAD"] ) - def test_bare(self): + def test_bare(self) -> None: self.assertFalse(self._repo.bare) self.assertTrue(os.path.isfile(os.path.join(self._repo.path, ".git"))) diff --git 
a/tests/compat/test_server.py b/tests/compat/test_server.py index a518a1431..4f8582c49 100644 --- a/tests/compat/test_server.py +++ b/tests/compat/test_server.py @@ -48,7 +48,7 @@ class GitServerTestCase(ServerTests, CompatTestCase): def _handlers(self): return {b"git-receive-pack": NoSideBand64kReceivePackHandler} - def _check_server(self, dul_server): + def _check_server(self, dul_server) -> None: receive_pack_handler_cls = dul_server.handlers[b"git-receive-pack"] caps = receive_pack_handler_cls.capabilities() self.assertNotIn(b"side-band-64k", caps) @@ -72,7 +72,7 @@ class GitServerSideBand64kTestCase(GitServerTestCase): # side-band-64k in git-receive-pack was introduced in git 1.7.0.2 min_git_version = (1, 7, 0, 2) - def setUp(self): + def setUp(self) -> None: super().setUp() # side-band-64k is broken in the windows client. # https://github.com/msysgit/git/issues/101 @@ -80,10 +80,10 @@ def setUp(self): if os.name == "nt": require_git_version((1, 9, 3)) - def _handlers(self): + def _handlers(self) -> None: return None # default handlers include side-band-64k - def _check_server(self, server): + def _check_server(self, server) -> None: receive_pack_handler_cls = server.handlers[b"git-receive-pack"] caps = receive_pack_handler_cls.capabilities() self.assertIn(b"side-band-64k", caps) diff --git a/tests/compat/test_utils.py b/tests/compat/test_utils.py index 070ace6a2..320cbe5e6 100644 --- a/tests/compat/test_utils.py +++ b/tests/compat/test_utils.py @@ -25,7 +25,7 @@ class GitVersionTests(TestCase): - def setUp(self): + def setUp(self) -> None: super().setUp() self._orig_run_git = utils.run_git self._version_str = None # tests can override to set stub version @@ -36,36 +36,36 @@ def run_git(args, **unused_kwargs): utils.run_git = run_git - def tearDown(self): + def tearDown(self) -> None: super().tearDown() utils.run_git = self._orig_run_git - def test_git_version_none(self): + def test_git_version_none(self) -> None: self._version_str = b"not a git version" self.assertEqual(None, utils.git_version()) - def test_git_version_3(self): + def test_git_version_3(self) -> None: self._version_str = b"git version 1.6.6" self.assertEqual((1, 6, 6, 0), utils.git_version()) - def test_git_version_4(self): + def test_git_version_4(self) -> None: self._version_str = b"git version 1.7.0.2" self.assertEqual((1, 7, 0, 2), utils.git_version()) - def test_git_version_extra(self): + def test_git_version_extra(self) -> None: self._version_str = b"git version 1.7.0.3.295.gd8fa2" self.assertEqual((1, 7, 0, 3), utils.git_version()) - def assertRequireSucceeds(self, required_version): + def assertRequireSucceeds(self, required_version) -> None: try: utils.require_git_version(required_version) except SkipTest: self.fail() - def assertRequireFails(self, required_version): + def assertRequireFails(self, required_version) -> None: self.assertRaises(SkipTest, utils.require_git_version, required_version) - def test_require_git_version(self): + def test_require_git_version(self) -> None: try: self._version_str = b"git version 1.6.6" self.assertRequireSucceeds((1, 6, 6)) diff --git a/tests/compat/test_web.py b/tests/compat/test_web.py index 68c3ebf3c..97fc82c87 100644 --- a/tests/compat/test_web.py +++ b/tests/compat/test_web.py @@ -27,6 +27,7 @@ import sys import threading +from typing import NoReturn from wsgiref import simple_server from dulwich.server import DictBackend, ReceivePackHandler, UploadPackHandler @@ -82,7 +83,7 @@ class SmartWebTestCase(WebTests, CompatTestCase): def _handlers(self): return 
{b"git-receive-pack": NoSideBand64kReceivePackHandler} - def _check_app(self, app): + def _check_app(self, app) -> None: receive_pack_handler_cls = app.handlers[b"git-receive-pack"] caps = receive_pack_handler_cls.capabilities() self.assertNotIn(b"side-band-64k", caps) @@ -119,20 +120,20 @@ class SmartWebSideBand64kTestCase(SmartWebTestCase): # side-band-64k in git-receive-pack was introduced in git 1.7.0.2 min_git_version = (1, 7, 0, 2) - def setUp(self): + def setUp(self) -> None: self.o_uph_cap = patch_capabilities(UploadPackHandler, (b"no-done",)) self.o_rph_cap = patch_capabilities(ReceivePackHandler, (b"no-done",)) super().setUp() - def tearDown(self): + def tearDown(self) -> None: super().tearDown() UploadPackHandler.capabilities = self.o_uph_cap ReceivePackHandler.capabilities = self.o_rph_cap - def _handlers(self): + def _handlers(self) -> None: return None # default handlers include side-band-64k - def _check_app(self, app): + def _check_app(self, app) -> None: receive_pack_handler_cls = app.handlers[b"git-receive-pack"] caps = receive_pack_handler_cls.capabilities() self.assertIn(b"side-band-64k", caps) @@ -147,10 +148,10 @@ class SmartWebSideBand64kNoDoneTestCase(SmartWebTestCase): # no-done was introduced in git 1.7.4 min_git_version = (1, 7, 4) - def _handlers(self): + def _handlers(self) -> None: return None # default handlers include side-band-64k - def _check_app(self, app): + def _check_app(self, app) -> None: receive_pack_handler_cls = app.handlers[b"git-receive-pack"] caps = receive_pack_handler_cls.capabilities() self.assertIn(b"side-band-64k", caps) @@ -164,33 +165,33 @@ class DumbWebTestCase(WebTests, CompatTestCase): def _make_app(self, backend): return make_wsgi_chain(backend, dumb=True) - def test_push_to_dulwich(self): + def test_push_to_dulwich(self) -> NoReturn: # Note: remove this if dulwich implements dumb web pushing. raise SkipTest("Dumb web pushing not supported.") - def test_push_to_dulwich_remove_branch(self): + def test_push_to_dulwich_remove_branch(self) -> NoReturn: # Note: remove this if dumb pushing is supported raise SkipTest("Dumb web pushing not supported.") - def test_new_shallow_clone_from_dulwich(self): + def test_new_shallow_clone_from_dulwich(self) -> NoReturn: # Note: remove this if C git and dulwich implement dumb web shallow # clones. raise SkipTest("Dumb web shallow cloning not supported.") - def test_shallow_clone_from_git_is_identical(self): + def test_shallow_clone_from_git_is_identical(self) -> NoReturn: # Note: remove this if C git and dulwich implement dumb web shallow # clones. raise SkipTest("Dumb web shallow cloning not supported.") - def test_fetch_same_depth_into_shallow_clone_from_dulwich(self): + def test_fetch_same_depth_into_shallow_clone_from_dulwich(self) -> NoReturn: # Note: remove this if C git and dulwich implement dumb web shallow # clones. raise SkipTest("Dumb web shallow cloning not supported.") - def test_fetch_full_depth_into_shallow_clone_from_dulwich(self): + def test_fetch_full_depth_into_shallow_clone_from_dulwich(self) -> NoReturn: # Note: remove this if C git and dulwich implement dumb web shallow # clones. 
raise SkipTest("Dumb web shallow cloning not supported.") - def test_push_to_dulwich_issue_88_standard(self): + def test_push_to_dulwich_issue_88_standard(self) -> NoReturn: raise SkipTest("Dumb web pushing not supported.") diff --git a/tests/compat/utils.py b/tests/compat/utils.py index dc61c7a72..bb430fce4 100644 --- a/tests/compat/utils.py +++ b/tests/compat/utils.py @@ -73,7 +73,7 @@ def git_version(git_path=_DEFAULT_GIT): return tuple(nums[:_VERSION_LEN]) -def require_git_version(required_version, git_path=_DEFAULT_GIT): +def require_git_version(required_version, git_path=_DEFAULT_GIT) -> None: """Require git version >= version, or skip the calling test. Args: @@ -196,7 +196,7 @@ def import_repo_to_dir(name): return temp_repo_dir -def check_for_daemon(limit=10, delay=0.1, timeout=0.1, port=TCP_GIT_PORT): +def check_for_daemon(limit=10, delay=0.1, timeout=0.1, port=TCP_GIT_PORT) -> bool: """Check for a running TCP daemon. Defaults to checking 10 times with a delay of 0.1 sec between tries. @@ -237,18 +237,18 @@ class CompatTestCase(TestCase): min_git_version: tuple[int, ...] = (1, 5, 0) - def setUp(self): + def setUp(self) -> None: super().setUp() require_git_version(self.min_git_version) - def assertObjectStoreEqual(self, store1, store2): + def assertObjectStoreEqual(self, store1, store2) -> None: self.assertEqual(sorted(set(store1)), sorted(set(store2))) - def assertReposEqual(self, repo1, repo2): + def assertReposEqual(self, repo1, repo2) -> None: self.assertEqual(repo1.get_refs(), repo2.get_refs()) self.assertObjectStoreEqual(repo1.object_store, repo2.object_store) - def assertReposNotEqual(self, repo1, repo2): + def assertReposNotEqual(self, repo1, repo2) -> None: refs1 = repo1.get_refs() objs1 = set(repo1.object_store) refs2 = repo2.get_refs() @@ -267,7 +267,7 @@ def import_repo(self, name): path = import_repo_to_dir(name) repo = Repo(path) - def cleanup(): + def cleanup() -> None: repo.close() rmtree_ro(os.path.dirname(path.rstrip(os.sep))) @@ -277,7 +277,7 @@ def cleanup(): if sys.platform == "win32": - def remove_ro(action, name, exc): + def remove_ro(action, name, exc) -> None: os.chmod(name, stat.S_IWRITE) os.remove(name) diff --git a/tests/contrib/test_paramiko_vendor.py b/tests/contrib/test_paramiko_vendor.py index 0e12e5d77..d552a874f 100644 --- a/tests/contrib/test_paramiko_vendor.py +++ b/tests/contrib/test_paramiko_vendor.py @@ -22,6 +22,7 @@ import socket import threading from io import StringIO +from typing import Optional from unittest import skipIf from .. 
import TestCase @@ -41,7 +42,7 @@ def __init__(self, commands, *args, **kwargs) -> None: super().__init__(*args, **kwargs) self.commands = commands - def check_channel_exec_request(self, channel, command): + def check_channel_exec_request(self, channel, command) -> bool: self.commands.append(command) return True @@ -61,7 +62,7 @@ def check_channel_request(self, kind, chanid): return paramiko.OPEN_SUCCEEDED return paramiko.OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED - def get_allowed_auths(self, username): + def get_allowed_auths(self, username) -> str: return "password,publickey" @@ -127,7 +128,7 @@ def get_allowed_auths(self, username): @skipIf(not has_paramiko, "paramiko is not installed") class ParamikoSSHVendorTests(TestCase): - def setUp(self): + def setUp(self) -> None: import paramiko.transport # re-enable server functionality for tests @@ -145,10 +146,10 @@ def setUp(self): self.thread = threading.Thread(target=self._run) self.thread.start() - def tearDown(self): + def tearDown(self) -> None: self.thread.join() - def _run(self): + def _run(self) -> Optional[bool]: try: conn, addr = self.socket.accept() except OSError: @@ -160,7 +161,7 @@ def _run(self): server = Server(self.commands) self.transport.start_server(server=server) - def test_run_command_password(self): + def test_run_command_password(self) -> None: vendor = ParamikoSSHVendor( allow_agent=False, look_for_keys=False, @@ -175,7 +176,7 @@ def test_run_command_password(self): self.assertIn(b"test_run_command_password", self.commands) - def test_run_command_with_privkey(self): + def test_run_command_with_privkey(self) -> None: key = paramiko.RSAKey.from_private_key(StringIO(CLIENT_KEY)) vendor = ParamikoSSHVendor( @@ -192,7 +193,7 @@ def test_run_command_with_privkey(self): self.assertIn(b"test_run_command_with_privkey", self.commands) - def test_run_command_data_transfer(self): + def test_run_command_data_transfer(self) -> None: vendor = ParamikoSSHVendor( allow_agent=False, look_for_keys=False, diff --git a/tests/contrib/test_release_robot.py b/tests/contrib/test_release_robot.py index 65ab19e79..4a97249d3 100644 --- a/tests/contrib/test_release_robot.py +++ b/tests/contrib/test_release_robot.py @@ -42,7 +42,7 @@ def gmtime_to_datetime(gmt): class TagPatternTests(unittest.TestCase): """test tag patterns.""" - def test_tag_pattern(self): + def test_tag_pattern(self) -> None: """Test tag patterns.""" test_cases = { "0.3": "0.3", @@ -77,7 +77,7 @@ class GetRecentTagsTest(unittest.TestCase): } @classmethod - def setUpClass(cls): + def setUpClass(cls) -> None: cls.projdir = tempfile.mkdtemp() # temporary project directory cls.repo = Repo.init(cls.projdir) # test repo obj_store = cls.repo.object_store # test repo object store @@ -113,11 +113,11 @@ def setUpClass(cls): cls.repo[b"refs/tags/" + cls.t2.name] = cls.t2.id # add annotated tag @classmethod - def tearDownClass(cls): + def tearDownClass(cls) -> None: cls.repo.close() shutil.rmtree(cls.projdir) - def test_get_recent_tags(self): + def test_get_recent_tags(self) -> None: """Test get recent tags.""" tags = release_robot.get_recent_tags(self.projdir) # get test tags for tag, metadata in tags: diff --git a/tests/contrib/test_swift.py b/tests/contrib/test_swift.py index b4100d7d6..1b2555380 100644 --- a/tests/contrib/test_swift.py +++ b/tests/contrib/test_swift.py @@ -190,7 +190,7 @@ def __init__(self, root, conf, store=None) -> None: self.chunk_length = 12228 self.cache_length = 1 - def put_object(self, name, content): + def put_object(self, name, content) -> None: name = 
posixpath.join(self.root, name) if hasattr(content, "seek"): content.seek(0) @@ -218,7 +218,7 @@ def get_object(self, name, range=None): def get_container_objects(self): return [{"name": k.replace(self.root + "/", "")} for k in self.store] - def create_root(self): + def create_root(self) -> None: if self.root in self.store.keys(): pass else: @@ -233,11 +233,11 @@ def get_object_stat(self, name): @skipIf(missing_libs, skipmsg) class TestSwiftRepo(TestCase): - def setUp(self): + def setUp(self) -> None: super().setUp() self.conf = swift.load_conf(file=StringIO(config_file % def_config_file)) - def test_init(self): + def test_init(self) -> None: store = {"fakerepo/objects/pack": ""} with patch( "dulwich.contrib.swift.SwiftConnector", @@ -246,14 +246,14 @@ def test_init(self): ): swift.SwiftRepo("fakerepo", conf=self.conf) - def test_init_no_data(self): + def test_init_no_data(self) -> None: with patch( "dulwich.contrib.swift.SwiftConnector", new_callable=create_swift_connector, ): self.assertRaises(Exception, swift.SwiftRepo, "fakerepo", self.conf) - def test_init_bad_data(self): + def test_init_bad_data(self) -> None: store = {"fakerepo/.git/objects/pack": ""} with patch( "dulwich.contrib.swift.SwiftConnector", @@ -262,7 +262,7 @@ def test_init_bad_data(self): ): self.assertRaises(Exception, swift.SwiftRepo, "fakerepo", self.conf) - def test_put_named_file(self): + def test_put_named_file(self) -> None: store = {"fakerepo/objects/pack": ""} with patch( "dulwich.contrib.swift.SwiftConnector", @@ -274,7 +274,7 @@ def test_put_named_file(self): repo._put_named_file("description", desc) self.assertEqual(repo.scon.store["fakerepo/description"], desc) - def test_init_bare(self): + def test_init_bare(self) -> None: fsc = FakeSwiftConnector("fakeroot", conf=self.conf) with patch( "dulwich.contrib.swift.SwiftConnector", @@ -289,7 +289,7 @@ def test_init_bare(self): @skipIf(missing_libs, skipmsg) class TestSwiftInfoRefsContainer(TestCase): - def setUp(self): + def setUp(self) -> None: super().setUp() content = ( b"22effb216e3a82f97da599b8885a6cadb488b4c5\trefs/heads/master\n" @@ -300,7 +300,7 @@ def setUp(self): self.fsc = FakeSwiftConnector("fakerepo", conf=self.conf) self.object_store = {} - def test_init(self): + def test_init(self) -> None: """info/refs does not exists.""" irc = swift.SwiftInfoRefsContainer(self.fsc, self.object_store) self.assertEqual(len(irc._refs), 0) @@ -309,7 +309,7 @@ def test_init(self): self.assertIn(b"refs/heads/dev", irc.allkeys()) self.assertIn(b"refs/heads/master", irc.allkeys()) - def test_set_if_equals(self): + def test_set_if_equals(self) -> None: self.fsc.store = self.store irc = swift.SwiftInfoRefsContainer(self.fsc, self.object_store) irc.set_if_equals( @@ -319,7 +319,7 @@ def test_set_if_equals(self): ) self.assertEqual(irc[b"refs/heads/dev"], b"1" * 40) - def test_remove_if_equals(self): + def test_remove_if_equals(self) -> None: self.fsc.store = self.store irc = swift.SwiftInfoRefsContainer(self.fsc, self.object_store) irc.remove_if_equals( @@ -330,13 +330,13 @@ def test_remove_if_equals(self): @skipIf(missing_libs, skipmsg) class TestSwiftConnector(TestCase): - def setUp(self): + def setUp(self) -> None: super().setUp() self.conf = swift.load_conf(file=StringIO(config_file % def_config_file)) with patch("geventhttpclient.HTTPClient.request", fake_auth_request_v1): self.conn = swift.SwiftConnector("fakerepo", conf=self.conf) - def test_init_connector(self): + def test_init_connector(self) -> None: self.assertEqual(self.conn.auth_ver, "1") 
self.assertEqual(self.conn.auth_url, "http://127.0.0.1:8080/auth/v1.0") self.assertEqual(self.conn.user, "test:tester") @@ -363,18 +363,18 @@ def test_init_connector(self): lambda: swift.SwiftConnector("fakerepo", conf=self.conf), ) - def test_root_exists(self): + def test_root_exists(self) -> None: with patch("geventhttpclient.HTTPClient.request", lambda *args: Response()): self.assertEqual(self.conn.test_root_exists(), True) - def test_root_not_exists(self): + def test_root_not_exists(self) -> None: with patch( "geventhttpclient.HTTPClient.request", lambda *args: Response(status=404), ): self.assertEqual(self.conn.test_root_exists(), None) - def test_create_root(self): + def test_create_root(self) -> None: with patch( "dulwich.contrib.swift.SwiftConnector.test_root_exists", lambda *args: None, @@ -382,7 +382,7 @@ def test_create_root(self): with patch("geventhttpclient.HTTPClient.request", lambda *args: Response()): self.assertEqual(self.conn.create_root(), None) - def test_create_root_fails(self): + def test_create_root_fails(self) -> None: with patch( "dulwich.contrib.swift.SwiftConnector.test_root_exists", lambda *args: None, @@ -393,42 +393,42 @@ def test_create_root_fails(self): ): self.assertRaises(swift.SwiftException, self.conn.create_root) - def test_get_container_objects(self): + def test_get_container_objects(self) -> None: with patch( "geventhttpclient.HTTPClient.request", lambda *args: Response(content=json.dumps(({"name": "a"}, {"name": "b"}))), ): self.assertEqual(len(self.conn.get_container_objects()), 2) - def test_get_container_objects_fails(self): + def test_get_container_objects_fails(self) -> None: with patch( "geventhttpclient.HTTPClient.request", lambda *args: Response(status=404), ): self.assertEqual(self.conn.get_container_objects(), None) - def test_get_object_stat(self): + def test_get_object_stat(self) -> None: with patch( "geventhttpclient.HTTPClient.request", lambda *args: Response(headers={"content-length": "10"}), ): self.assertEqual(self.conn.get_object_stat("a")["content-length"], "10") - def test_get_object_stat_fails(self): + def test_get_object_stat_fails(self) -> None: with patch( "geventhttpclient.HTTPClient.request", lambda *args: Response(status=404), ): self.assertEqual(self.conn.get_object_stat("a"), None) - def test_put_object(self): + def test_put_object(self) -> None: with patch( "geventhttpclient.HTTPClient.request", lambda *args, **kwargs: Response(), ): self.assertEqual(self.conn.put_object("a", BytesIO(b"content")), None) - def test_put_object_fails(self): + def test_put_object_fails(self) -> None: with patch( "geventhttpclient.HTTPClient.request", lambda *args, **kwargs: Response(status=400), @@ -438,7 +438,7 @@ def test_put_object_fails(self): lambda: self.conn.put_object("a", BytesIO(b"content")), ) - def test_get_object(self): + def test_get_object(self) -> None: with patch( "geventhttpclient.HTTPClient.request", lambda *args, **kwargs: Response(content=b"content"), @@ -450,18 +450,18 @@ def test_get_object(self): ): self.assertEqual(self.conn.get_object("a", range="0-6"), b"content") - def test_get_object_fails(self): + def test_get_object_fails(self) -> None: with patch( "geventhttpclient.HTTPClient.request", lambda *args, **kwargs: Response(status=404), ): self.assertEqual(self.conn.get_object("a"), None) - def test_del_object(self): + def test_del_object(self) -> None: with patch("geventhttpclient.HTTPClient.request", lambda *args: Response()): self.assertEqual(self.conn.del_object("a"), None) - def test_del_root(self): + def 
test_del_root(self) -> None: with patch( "dulwich.contrib.swift.SwiftConnector.del_object", lambda *args: None, @@ -479,7 +479,7 @@ def test_del_root(self): @skipIf(missing_libs, skipmsg) class SwiftObjectStoreTests(ObjectStoreTests, TestCase): - def setUp(self): + def setUp(self) -> None: TestCase.setUp(self) conf = swift.load_conf(file=StringIO(config_file % def_config_file)) fsc = FakeSwiftConnector("fakerepo", conf=conf) diff --git a/tests/contrib/test_swift_smoke.py b/tests/contrib/test_swift_smoke.py index 9ce7e7a08..130f4ba33 100644 --- a/tests/contrib/test_swift_smoke.py +++ b/tests/contrib/test_swift_smoke.py @@ -54,11 +54,11 @@ def __init__(self, backend, port) -> None: self.port = port self.backend = backend - def run(self): + def run(self) -> None: self.server = server.TCPGitServer(self.backend, "localhost", port=self.port) self.job = gevent.spawn(self.server.serve_forever) - def stop(self): + def stop(self) -> None: self.server.shutdown() gevent.joinall((self.job,)) @@ -70,7 +70,7 @@ def open_repository(self, path): class SwiftRepoSmokeTest(unittest.TestCase): @classmethod - def setUpClass(cls): + def setUpClass(cls) -> None: cls.backend = SwiftSystemBackend() cls.port = 9148 cls.server_address = "localhost" @@ -80,10 +80,10 @@ def setUpClass(cls): cls.conf = swift.load_conf() @classmethod - def tearDownClass(cls): + def tearDownClass(cls) -> None: cls.th_server.stop() - def setUp(self): + def setUp(self) -> None: self.scon = swift.SwiftConnector(self.fakerepo, self.conf) if self.scon.test_root_exists(): try: @@ -94,7 +94,7 @@ def setUp(self): if os.path.isdir(self.temp_d): shutil.rmtree(self.temp_d) - def tearDown(self): + def tearDown(self) -> None: if self.scon.test_root_exists(): try: self.scon.del_root() @@ -103,7 +103,7 @@ def tearDown(self): if os.path.isdir(self.temp_d): shutil.rmtree(self.temp_d) - def test_init_bare(self): + def test_init_bare(self) -> None: swift.SwiftRepo.init_bare(self.scon, self.conf) self.assertTrue(self.scon.test_root_exists()) obj = self.scon.get_container_objects() @@ -112,7 +112,7 @@ def test_init_bare(self): ] self.assertEqual(len(filtered), 2) - def test_clone_bare(self): + def test_clone_bare(self) -> None: local_repo = repo.Repo.init(self.temp_d, mkdir=True) swift.SwiftRepo.init_bare(self.scon, self.conf) tcp_client = client.TCPGitClient(self.server_address, port=self.port) @@ -120,7 +120,7 @@ def test_clone_bare(self): # The remote repo is empty (no refs retrieved) self.assertEqual(remote_refs, None) - def test_push_commit(self): + def test_push_commit(self) -> None: def determine_wants(*args, **kwargs): return {"refs/heads/master": local_repo.refs["HEAD"]} @@ -137,7 +137,7 @@ def determine_wants(*args, **kwargs): remote_sha = swift_repo.refs.read_loose_ref("refs/heads/master") self.assertEqual(sha, remote_sha) - def test_push_branch(self): + def test_push_branch(self) -> None: def determine_wants(*args, **kwargs): return {"refs/heads/mybranch": local_repo.refs["refs/heads/mybranch"]} @@ -154,7 +154,7 @@ def determine_wants(*args, **kwargs): remote_sha = swift_repo.refs.read_loose_ref("refs/heads/mybranch") self.assertEqual(sha, remote_sha) - def test_push_multiple_branch(self): + def test_push_multiple_branch(self) -> None: def determine_wants(*args, **kwargs): return { "refs/heads/mybranch": local_repo.refs["refs/heads/mybranch"], @@ -182,7 +182,7 @@ def determine_wants(*args, **kwargs): remote_shas[branch] = swift_repo.refs.read_loose_ref(f"refs/heads/{branch}") self.assertDictEqual(local_shas, remote_shas) - def 
test_push_data_branch(self): + def test_push_data_branch(self) -> None: def determine_wants(*args, **kwargs): return {"refs/heads/master": local_repo.refs["HEAD"]} @@ -216,7 +216,7 @@ def determine_wants(*args, **kwargs): # Tree self.assertEqual(objs_[0][0], 2) - def test_clone_then_push_data(self): + def test_clone_then_push_data(self) -> None: self.test_push_data_branch() shutil.rmtree(self.temp_d) local_repo = repo.Repo.init(self.temp_d, mkdir=True) @@ -250,7 +250,7 @@ def determine_wants(*args, **kwargs): "/fakerepo", determine_wants, local_repo.generate_pack_data ) - def test_push_remove_branch(self): + def test_push_remove_branch(self) -> None: def determine_wants(*args, **kwargs): return { "refs/heads/pullr-108": objects.ZERO_SHA, @@ -267,7 +267,7 @@ def determine_wants(*args, **kwargs): swift_repo = swift.SwiftRepo("fakerepo", self.conf) self.assertNotIn("refs/heads/pullr-108", swift_repo.refs.allkeys()) - def test_push_annotated_tag(self): + def test_push_annotated_tag(self) -> None: def determine_wants(*args, **kwargs): return { "refs/heads/master": local_repo.refs["HEAD"], diff --git a/tests/test_archive.py b/tests/test_archive.py index 70dccc78f..5cc277d39 100644 --- a/tests/test_archive.py +++ b/tests/test_archive.py @@ -39,7 +39,7 @@ class ArchiveTests(TestCase): - def test_empty(self): + def test_empty(self) -> None: store = MemoryObjectStore() c1, c2, c3 = build_commit_graph(store, [[1], [2, 1], [3, 1, 2]]) tree = store[c3.tree] @@ -59,13 +59,13 @@ def _get_example_tar_stream(self, *tar_stream_args, **tar_stream_kwargs): stream = b"".join(tar_stream(store, t1, *tar_stream_args, **tar_stream_kwargs)) return BytesIO(stream) - def test_simple(self): + def test_simple(self) -> None: stream = self._get_example_tar_stream(mtime=0) tf = tarfile.TarFile(fileobj=stream) self.addCleanup(tf.close) self.assertEqual(["somename"], tf.getnames()) - def test_unicode(self): + def test_unicode(self) -> None: store = MemoryObjectStore() b1 = Blob.from_string(b"somedata") store.add_object(b1) @@ -77,19 +77,19 @@ def test_unicode(self): self.addCleanup(tf.close) self.assertEqual(["ő"], tf.getnames()) - def test_prefix(self): + def test_prefix(self) -> None: stream = self._get_example_tar_stream(mtime=0, prefix=b"blah") tf = tarfile.TarFile(fileobj=stream) self.addCleanup(tf.close) self.assertEqual(["blah/somename"], tf.getnames()) - def test_gzip_mtime(self): + def test_gzip_mtime(self) -> None: stream = self._get_example_tar_stream(mtime=1234, format="gz") expected_mtime = struct.pack("<L", 1234) self.assertEqual(stream.getvalue()[4:8], expected_mtime) - def test_same_file(self): + def test_same_file(self) -> None: contents = [None, None] for format in ["", "gz", "bz2"]: for i in [0, 1]: diff --git a/tests/test_blackbox.py b/tests/test_blackbox.py index 34e2e47b1..484c65186 100644 --- a/tests/test_blackbox.py +++ b/tests/test_blackbox.py @@ -31,19 +31,19 @@ class GitReceivePackTests(BlackboxTestCase): """Blackbox tests for dul-receive-pack.""" - def setUp(self): + def setUp(self) -> None: super().setUp() self.path = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, self.path) self.repo = Repo.init(self.path) - def test_basic(self): + def test_basic(self) -> None: process = self.run_command("dul-receive-pack", [self.path]) (stdout, stderr) = process.communicate(b"0000") self.assertEqual(b"0000", stdout[-4:]) self.assertEqual(0, process.returncode) - def test_missing_arg(self): + def test_missing_arg(self) -> None: process = self.run_command("dul-receive-pack", []) (stdout, stderr) = process.communicate() self.assertEqual( @@ -56,13 +56,13 @@ class GitUploadPackTests(BlackboxTestCase):
"""Blackbox tests for dul-upload-pack.""" - def setUp(self): + def setUp(self) -> None: super().setUp() self.path = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, self.path) self.repo = Repo.init(self.path) - def test_missing_arg(self): + def test_missing_arg(self) -> None: process = self.run_command("dul-upload-pack", []) (stdout, stderr) = process.communicate() self.assertEqual( diff --git a/tests/test_bundle.py b/tests/test_bundle.py index d60d22272..5dcf4a92d 100644 --- a/tests/test_bundle.py +++ b/tests/test_bundle.py @@ -31,7 +31,7 @@ class BundleTests(TestCase): - def test_roundtrip_bundle(self): + def test_roundtrip_bundle(self) -> None: origbundle = Bundle() origbundle.version = 3 origbundle.capabilities = {"foo": None} diff --git a/tests/test_client.py b/tests/test_client.py index 4d58eb319..12ce451c1 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -25,6 +25,7 @@ import tempfile import warnings from io import BytesIO +from typing import NoReturn from unittest.mock import patch from urllib.parse import quote as urlquote from urllib.parse import urlparse @@ -88,19 +89,19 @@ def __init__(self, *args, **kwards) -> None: def communicate(self, *args, **kwards): return ("Running", "") - def wait(self, *args, **kwards): + def wait(self, *args, **kwards) -> bool: return False # TODO(durin42): add unit-level tests of GitClient class GitClientTests(TestCase): - def setUp(self): + def setUp(self) -> None: super().setUp() self.rout = BytesIO() self.rin = BytesIO() self.client = DummyClient(lambda x: True, self.rin.read, self.rout.write) - def test_caps(self): + def test_caps(self) -> None: agent_cap = ("agent=dulwich/%d.%d.%d" % dulwich.__version__).encode("ascii") self.assertEqual( { @@ -125,13 +126,13 @@ def test_caps(self): set(self.client._send_capabilities), ) - def test_archive_ack(self): + def test_archive_ack(self) -> None: self.rin.write(b"0009NACK\n" b"0000") self.rin.seek(0) self.client.archive(b"bla", b"HEAD", None, None) self.assertEqual(self.rout.getvalue(), b"0011argument HEAD0000") - def test_fetch_empty(self): + def test_fetch_empty(self) -> None: self.rin.write(b"0000") self.rin.seek(0) @@ -143,7 +144,7 @@ def check_heads(heads, **kwargs): self.assertEqual({}, ret.refs) self.assertEqual({}, ret.symrefs) - def test_fetch_pack_ignores_magic_ref(self): + def test_fetch_pack_ignores_magic_ref(self) -> None: self.rin.write( b"00000000000000000000000000000000000000000000 capabilities^{}" b"\x00 multi_ack " @@ -162,7 +163,7 @@ def check_heads(heads, **kwargs): self.assertEqual({}, ret.symrefs) self.assertEqual(self.rout.getvalue(), b"0000") - def test_fetch_pack_none(self): + def test_fetch_pack_none(self) -> None: self.rin.write( b"008855dcc6bf963f922e1ed5c4bbaaefcfacef57b1d7 HEAD\x00multi_ack " b"thin-pack side-band side-band-64k ofs-delta shallow no-progress " @@ -226,7 +227,7 @@ def generate_pack_data(have, want, ofs_delta=False, progress=None): ) self.assertEqual({b"refs/foo/bar": commit.id}, result.refs) - def test_send_pack_none(self): + def test_send_pack_none(self) -> None: # Set ref to current value self.rin.write( b"0078310ca9477129b8586fa2afc779c1f57cf64bba6c " @@ -245,7 +246,7 @@ def generate_pack_data(have, want, ofs_delta=False, progress=None): self.client.send_pack(b"/", update_refs, generate_pack_data) self.assertEqual(self.rout.getvalue(), b"0000") - def test_send_pack_keep_and_delete(self): + def test_send_pack_keep_and_delete(self) -> None: self.rin.write( b"0063310ca9477129b8586fa2afc779c1f57cf64bba6c " b"refs/heads/master\x00report-status 
delete-refs ofs-delta\n" @@ -270,7 +271,7 @@ def generate_pack_data(have, want, ofs_delta=False, progress=None): b"refs/heads/master\x00delete-refs ofs-delta report-status0000", ) - def test_send_pack_delete_only(self): + def test_send_pack_delete_only(self) -> None: self.rin.write( b"0063310ca9477129b8586fa2afc779c1f57cf64bba6c " b"refs/heads/master\x00report-status delete-refs ofs-delta\n" @@ -294,7 +295,7 @@ def generate_pack_data(have, want, ofs_delta=False, progress=None): b"refs/heads/master\x00delete-refs ofs-delta report-status0000", ) - def test_send_pack_new_ref_only(self): + def test_send_pack_new_ref_only(self) -> None: self.rin.write( b"0063310ca9477129b8586fa2afc779c1f57cf64bba6c " b"refs/heads/master\x00report-status delete-refs ofs-delta\n" @@ -324,7 +325,7 @@ def generate_pack_data(have, want, ofs_delta=False, progress=None): + f.getvalue(), ) - def test_send_pack_new_ref(self): + def test_send_pack_new_ref(self) -> None: self.rin.write( b"0064310ca9477129b8586fa2afc779c1f57cf64bba6c " b"refs/heads/master\x00 report-status delete-refs ofs-delta\n" @@ -370,7 +371,7 @@ def generate_pack_data(have, want, ofs_delta=False, progress=None): + f.getvalue(), ) - def test_send_pack_no_deleteref_delete_only(self): + def test_send_pack_no_deleteref_delete_only(self) -> None: pkts = [ b"310ca9477129b8586fa2afc779c1f57cf64bba6c refs/heads/master" b"\x00 report-status ofs-delta\n", @@ -403,21 +404,21 @@ def generate_pack_data(have, want, ofs_delta=False, progress=None): class TestGetTransportAndPath(TestCase): - def test_tcp(self): + def test_tcp(self) -> None: c, path = get_transport_and_path("git://foo.com/bar/baz") self.assertIsInstance(c, TCPGitClient) self.assertEqual("foo.com", c._host) self.assertEqual(TCP_GIT_PORT, c._port) self.assertEqual("/bar/baz", path) - def test_tcp_port(self): + def test_tcp_port(self) -> None: c, path = get_transport_and_path("git://foo.com:1234/bar/baz") self.assertIsInstance(c, TCPGitClient) self.assertEqual("foo.com", c._host) self.assertEqual(1234, c._port) self.assertEqual("/bar/baz", path) - def test_git_ssh_explicit(self): + def test_git_ssh_explicit(self) -> None: c, path = get_transport_and_path("git+ssh://foo.com/bar/baz") self.assertIsInstance(c, SSHGitClient) self.assertEqual("foo.com", c.host) @@ -425,7 +426,7 @@ def test_git_ssh_explicit(self): self.assertEqual(None, c.username) self.assertEqual("/bar/baz", path) - def test_ssh_explicit(self): + def test_ssh_explicit(self) -> None: c, path = get_transport_and_path("ssh://foo.com/bar/baz") self.assertIsInstance(c, SSHGitClient) self.assertEqual("foo.com", c.host) @@ -433,20 +434,20 @@ def test_ssh_explicit(self): self.assertEqual(None, c.username) self.assertEqual("/bar/baz", path) - def test_ssh_port_explicit(self): + def test_ssh_port_explicit(self) -> None: c, path = get_transport_and_path("git+ssh://foo.com:1234/bar/baz") self.assertIsInstance(c, SSHGitClient) self.assertEqual("foo.com", c.host) self.assertEqual(1234, c.port) self.assertEqual("/bar/baz", path) - def test_username_and_port_explicit_unknown_scheme(self): + def test_username_and_port_explicit_unknown_scheme(self) -> None: c, path = get_transport_and_path("unknown://git@server:7999/dply/stuff.git") self.assertIsInstance(c, SSHGitClient) self.assertEqual("unknown", c.host) self.assertEqual("//git@server:7999/dply/stuff.git", path) - def test_username_and_port_explicit(self): + def test_username_and_port_explicit(self) -> None: c, path = get_transport_and_path("ssh://git@server:7999/dply/stuff.git") self.assertIsInstance(c, 
SSHGitClient) self.assertEqual("git", c.username) @@ -454,7 +455,7 @@ def test_username_and_port_explicit(self): self.assertEqual(7999, c.port) self.assertEqual("/dply/stuff.git", path) - def test_ssh_abspath_doubleslash(self): + def test_ssh_abspath_doubleslash(self) -> None: c, path = get_transport_and_path("git+ssh://foo.com//bar/baz") self.assertIsInstance(c, SSHGitClient) self.assertEqual("foo.com", c.host) @@ -462,14 +463,14 @@ def test_ssh_abspath_doubleslash(self): self.assertEqual(None, c.username) self.assertEqual("//bar/baz", path) - def test_ssh_port(self): + def test_ssh_port(self) -> None: c, path = get_transport_and_path("git+ssh://foo.com:1234/bar/baz") self.assertIsInstance(c, SSHGitClient) self.assertEqual("foo.com", c.host) self.assertEqual(1234, c.port) self.assertEqual("/bar/baz", path) - def test_ssh_implicit(self): + def test_ssh_implicit(self) -> None: c, path = get_transport_and_path("foo:/bar/baz") self.assertIsInstance(c, SSHGitClient) self.assertEqual("foo", c.host) @@ -477,7 +478,7 @@ def test_ssh_implicit(self): self.assertEqual(None, c.username) self.assertEqual("/bar/baz", path) - def test_ssh_host(self): + def test_ssh_host(self) -> None: c, path = get_transport_and_path("foo.com:/bar/baz") self.assertIsInstance(c, SSHGitClient) self.assertEqual("foo.com", c.host) @@ -485,7 +486,7 @@ def test_ssh_host(self): self.assertEqual(None, c.username) self.assertEqual("/bar/baz", path) - def test_ssh_user_host(self): + def test_ssh_user_host(self) -> None: c, path = get_transport_and_path("user@foo.com:/bar/baz") self.assertIsInstance(c, SSHGitClient) self.assertEqual("foo.com", c.host) @@ -493,7 +494,7 @@ def test_ssh_user_host(self): self.assertEqual("user", c.username) self.assertEqual("/bar/baz", path) - def test_ssh_relpath(self): + def test_ssh_relpath(self) -> None: c, path = get_transport_and_path("foo:bar/baz") self.assertIsInstance(c, SSHGitClient) self.assertEqual("foo", c.host) @@ -501,7 +502,7 @@ def test_ssh_relpath(self): self.assertEqual(None, c.username) self.assertEqual("bar/baz", path) - def test_ssh_host_relpath(self): + def test_ssh_host_relpath(self) -> None: c, path = get_transport_and_path("foo.com:bar/baz") self.assertIsInstance(c, SSHGitClient) self.assertEqual("foo.com", c.host) @@ -509,7 +510,7 @@ def test_ssh_host_relpath(self): self.assertEqual(None, c.username) self.assertEqual("bar/baz", path) - def test_ssh_user_host_relpath(self): + def test_ssh_user_host_relpath(self) -> None: c, path = get_transport_and_path("user@foo.com:bar/baz") self.assertIsInstance(c, SSHGitClient) self.assertEqual("foo.com", c.host) @@ -517,30 +518,30 @@ def test_ssh_user_host_relpath(self): self.assertEqual("user", c.username) self.assertEqual("bar/baz", path) - def test_local(self): + def test_local(self) -> None: c, path = get_transport_and_path("foo.bar/baz") self.assertIsInstance(c, LocalGitClient) self.assertEqual("foo.bar/baz", path) @skipIf(sys.platform != "win32", "Behaviour only happens on windows.") - def test_local_abs_windows_path(self): + def test_local_abs_windows_path(self) -> None: c, path = get_transport_and_path("C:\\foo.bar\\baz") self.assertIsInstance(c, LocalGitClient) self.assertEqual("C:\\foo.bar\\baz", path) - def test_error(self): + def test_error(self) -> None: # Need to use a known urlparse.uses_netloc URL scheme to get the # expected parsing of the URL on Python versions less than 2.6.5 c, path = get_transport_and_path("prospero://bar/baz") self.assertIsInstance(c, SSHGitClient) - def test_http(self): + def test_http(self) -> 
None: url = "https://github.com/jelmer/dulwich" c, path = get_transport_and_path(url) self.assertIsInstance(c, HttpGitClient) self.assertEqual("/jelmer/dulwich", path) - def test_http_auth(self): + def test_http_auth(self) -> None: url = "https://user:passwd@github.com/jelmer/dulwich" c, path = get_transport_and_path(url) @@ -550,7 +551,7 @@ def test_http_auth(self): self.assertEqual("user", c._username) self.assertEqual("passwd", c._password) - def test_http_auth_with_username(self): + def test_http_auth_with_username(self) -> None: url = "https://github.com/jelmer/dulwich" c, path = get_transport_and_path(url, username="user2", password="blah") @@ -560,7 +561,7 @@ def test_http_auth_with_username(self): self.assertEqual("user2", c._username) self.assertEqual("blah", c._password) - def test_http_auth_with_username_and_in_url(self): + def test_http_auth_with_username_and_in_url(self) -> None: url = "https://user:passwd@github.com/jelmer/dulwich" c, path = get_transport_and_path(url, username="user2", password="blah") @@ -570,7 +571,7 @@ def test_http_auth_with_username_and_in_url(self): self.assertEqual("user", c._username) self.assertEqual("passwd", c._password) - def test_http_no_auth(self): + def test_http_no_auth(self) -> None: url = "https://github.com/jelmer/dulwich" c, path = get_transport_and_path(url) @@ -582,21 +583,21 @@ def test_http_no_auth(self): class TestGetTransportAndPathFromUrl(TestCase): - def test_tcp(self): + def test_tcp(self) -> None: c, path = get_transport_and_path_from_url("git://foo.com/bar/baz") self.assertIsInstance(c, TCPGitClient) self.assertEqual("foo.com", c._host) self.assertEqual(TCP_GIT_PORT, c._port) self.assertEqual("/bar/baz", path) - def test_tcp_port(self): + def test_tcp_port(self) -> None: c, path = get_transport_and_path_from_url("git://foo.com:1234/bar/baz") self.assertIsInstance(c, TCPGitClient) self.assertEqual("foo.com", c._host) self.assertEqual(1234, c._port) self.assertEqual("/bar/baz", path) - def test_ssh_explicit(self): + def test_ssh_explicit(self) -> None: c, path = get_transport_and_path_from_url("git+ssh://foo.com/bar/baz") self.assertIsInstance(c, SSHGitClient) self.assertEqual("foo.com", c.host) @@ -604,14 +605,14 @@ def test_ssh_explicit(self): self.assertEqual(None, c.username) self.assertEqual("/bar/baz", path) - def test_ssh_port_explicit(self): + def test_ssh_port_explicit(self) -> None: c, path = get_transport_and_path_from_url("git+ssh://foo.com:1234/bar/baz") self.assertIsInstance(c, SSHGitClient) self.assertEqual("foo.com", c.host) self.assertEqual(1234, c.port) self.assertEqual("/bar/baz", path) - def test_ssh_homepath(self): + def test_ssh_homepath(self) -> None: c, path = get_transport_and_path_from_url("git+ssh://foo.com/~/bar/baz") self.assertIsInstance(c, SSHGitClient) self.assertEqual("foo.com", c.host) @@ -619,41 +620,41 @@ def test_ssh_homepath(self): self.assertEqual(None, c.username) self.assertEqual("/~/bar/baz", path) - def test_ssh_port_homepath(self): + def test_ssh_port_homepath(self) -> None: c, path = get_transport_and_path_from_url("git+ssh://foo.com:1234/~/bar/baz") self.assertIsInstance(c, SSHGitClient) self.assertEqual("foo.com", c.host) self.assertEqual(1234, c.port) self.assertEqual("/~/bar/baz", path) - def test_ssh_host_relpath(self): + def test_ssh_host_relpath(self) -> None: self.assertRaises( ValueError, get_transport_and_path_from_url, "foo.com:bar/baz" ) - def test_ssh_user_host_relpath(self): + def test_ssh_user_host_relpath(self) -> None: self.assertRaises( ValueError, 
get_transport_and_path_from_url, "user@foo.com:bar/baz" ) - def test_local_path(self): + def test_local_path(self) -> None: self.assertRaises(ValueError, get_transport_and_path_from_url, "foo.bar/baz") - def test_error(self): + def test_error(self) -> None: # Need to use a known urlparse.uses_netloc URL scheme to get the # expected parsing of the URL on Python versions less than 2.6.5 self.assertRaises( ValueError, get_transport_and_path_from_url, "prospero://bar/baz" ) - def test_http(self): + def test_http(self) -> None: url = "https://github.com/jelmer/dulwich" c, path = get_transport_and_path_from_url(url) self.assertIsInstance(c, HttpGitClient) self.assertEqual("https://github.com", c.get_url(b"/")) self.assertEqual("/jelmer/dulwich", path) - def test_http_port(self): + def test_http_port(self) -> None: url = "https://github.com:9090/jelmer/dulwich" c, path = get_transport_and_path_from_url(url) self.assertEqual("https://github.com:9090", c.get_url(b"/")) @@ -662,14 +663,14 @@ def test_http_port(self): @patch("os.name", "posix") @patch("sys.platform", "linux") - def test_file(self): + def test_file(self) -> None: c, path = get_transport_and_path_from_url("file:///home/jelmer/foo") self.assertIsInstance(c, LocalGitClient) self.assertEqual("/home/jelmer/foo", path) @patch("os.name", "nt") @patch("sys.platform", "win32") - def test_file_win(self): + def test_file_win(self) -> None: # `_win32_url_to_path` uses urllib.request.url2pathname, which is set to # `ntutl2path.url2pathname` when `os.name==nt` from nturl2path import url2pathname @@ -736,7 +737,7 @@ class Subprocess: class SSHGitClientTests(TestCase): - def setUp(self): + def setUp(self) -> None: super().setUp() self.server = TestSSHVendor() @@ -745,35 +746,35 @@ def setUp(self): self.client = SSHGitClient("git.samba.org") - def tearDown(self): + def tearDown(self) -> None: super().tearDown() client.get_ssh_vendor = self.real_vendor - def test_get_url(self): + def test_get_url(self) -> None: path = "/tmp/repo.git" c = SSHGitClient("git.samba.org") url = c.get_url(path) self.assertEqual("ssh://git.samba.org/tmp/repo.git", url) - def test_get_url_with_username_and_port(self): + def test_get_url_with_username_and_port(self) -> None: path = "/tmp/repo.git" c = SSHGitClient("git.samba.org", port=2222, username="user") url = c.get_url(path) self.assertEqual("ssh://user@git.samba.org:2222/tmp/repo.git", url) - def test_default_command(self): + def test_default_command(self) -> None: self.assertEqual(b"git-upload-pack", self.client._get_cmd_path(b"upload-pack")) - def test_alternative_command_path(self): + def test_alternative_command_path(self) -> None: self.client.alternative_paths[b"upload-pack"] = b"/usr/lib/git/git-upload-pack" self.assertEqual( b"/usr/lib/git/git-upload-pack", self.client._get_cmd_path(b"upload-pack"), ) - def test_alternative_command_path_spaces(self): + def test_alternative_command_path_spaces(self) -> None: self.client.alternative_paths[b"upload-pack"] = ( b"/usr/lib/git/git-upload-pack -ibla" ) @@ -782,7 +783,7 @@ def test_alternative_command_path_spaces(self): self.client._get_cmd_path(b"upload-pack"), ) - def test_connect(self): + def test_connect(self) -> None: server = self.server client = self.client @@ -797,7 +798,7 @@ def test_connect(self): client._connect(b"relative-command", b"/~/path/to/repo") self.assertEqual("git-relative-command '~/path/to/repo'", server.command) - def test_ssh_command_precedence(self): + def test_ssh_command_precedence(self) -> None: self.overrideEnv("GIT_SSH", "/path/to/ssh") 
test_client = SSHGitClient("git.samba.org") self.assertEqual(test_client.ssh_command, "/path/to/ssh") @@ -811,21 +812,21 @@ def test_ssh_command_precedence(self): class ReportStatusParserTests(TestCase): - def test_invalid_pack(self): + def test_invalid_pack(self) -> None: parser = ReportStatusParser() parser.handle_packet(b"unpack error - foo bar") parser.handle_packet(b"ok refs/foo/bar") parser.handle_packet(None) self.assertRaises(SendPackError, list, parser.check()) - def test_update_refs_error(self): + def test_update_refs_error(self) -> None: parser = ReportStatusParser() parser.handle_packet(b"unpack ok") parser.handle_packet(b"ng refs/foo/bar need to pull") parser.handle_packet(None) self.assertEqual([(b"refs/foo/bar", "need to pull")], list(parser.check())) - def test_ok(self): + def test_ok(self) -> None: parser = ReportStatusParser() parser.handle_packet(b"unpack ok") parser.handle_packet(b"ok refs/foo/bar") @@ -834,14 +835,14 @@ def test_ok(self): class LocalGitClientTests(TestCase): - def test_get_url(self): + def test_get_url(self) -> None: path = "/tmp/repo.git" c = LocalGitClient() url = c.get_url(path) self.assertEqual("file:///tmp/repo.git", url) - def test_fetch_into_empty(self): + def test_fetch_into_empty(self) -> None: c = LocalGitClient() target = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, target) @@ -851,7 +852,7 @@ def test_fetch_into_empty(self): self.addCleanup(tear_down_repo, s) self.assertEqual(s.get_refs(), c.fetch(s.path, t).refs) - def test_clone(self): + def test_clone(self) -> None: c = LocalGitClient() s = open_repo("a.git") self.addCleanup(tear_down_repo, s) @@ -864,7 +865,7 @@ def test_clone(self): expected[b"refs/remotes/origin/master"] = expected[b"refs/heads/master"] self.assertEqual(expected, result_repo.get_refs()) - def test_fetch_empty(self): + def test_fetch_empty(self) -> None: c = LocalGitClient() s = open_repo("a.git") self.addCleanup(tear_down_repo, s) @@ -889,7 +890,7 @@ def test_fetch_empty(self): out.getvalue(), ) - def test_fetch_pack_none(self): + def test_fetch_pack_none(self) -> None: c = LocalGitClient() s = open_repo("a.git") self.addCleanup(tear_down_repo, s) @@ -916,7 +917,7 @@ def test_fetch_pack_none(self): out.getvalue().startswith(b"PACK\x00\x00\x00\x02\x00\x00\x00\x07") ) - def test_send_pack_without_changes(self): + def test_send_pack_without_changes(self) -> None: local = open_repo("a.git") self.addCleanup(tear_down_repo, local) @@ -925,7 +926,7 @@ def test_send_pack_without_changes(self): self.send_and_verify(b"master", local, target) - def test_send_pack_with_changes(self): + def test_send_pack_with_changes(self) -> None: local = open_repo("a.git") self.addCleanup(tear_down_repo, local) @@ -934,7 +935,7 @@ def test_send_pack_with_changes(self): with Repo.init_bare(target_path) as target: self.send_and_verify(b"master", local, target) - def test_get_refs(self): + def test_get_refs(self) -> None: local = open_repo("refs.git") self.addCleanup(tear_down_repo, local) @@ -942,7 +943,7 @@ def test_get_refs(self): refs = client.get_refs(local.path) self.assertDictEqual(local.refs.as_dict(), refs) - def send_and_verify(self, branch, local, target): + def send_and_verify(self, branch, local, target) -> None: """Send branch from local to remote repository and verify it worked.""" client = LocalGitClient() ref_name = b"refs/heads/" + branch @@ -962,7 +963,7 @@ def send_and_verify(self, branch, local, target): class HttpGitClientTests(TestCase): - def test_get_url(self): + def test_get_url(self) -> None: base_url = 
"https://github.com/jelmer/dulwich" path = "/jelmer/dulwich" c = HttpGitClient(base_url) @@ -970,7 +971,7 @@ def test_get_url(self): url = c.get_url(path) self.assertEqual("https://github.com/jelmer/dulwich", url) - def test_get_url_bytes_path(self): + def test_get_url_bytes_path(self) -> None: base_url = "https://github.com/jelmer/dulwich" path_bytes = b"/jelmer/dulwich" c = HttpGitClient(base_url) @@ -978,7 +979,7 @@ def test_get_url_bytes_path(self): url = c.get_url(path_bytes) self.assertEqual("https://github.com/jelmer/dulwich", url) - def test_get_url_with_username_and_passwd(self): + def test_get_url_with_username_and_passwd(self) -> None: base_url = "https://github.com/jelmer/dulwich" path = "/jelmer/dulwich" c = HttpGitClient(base_url, username="USERNAME", password="PASSWD") @@ -986,7 +987,7 @@ def test_get_url_with_username_and_passwd(self): url = c.get_url(path) self.assertEqual("https://github.com/jelmer/dulwich", url) - def test_init_username_passwd_set(self): + def test_init_username_passwd_set(self) -> None: url = "https://github.com/jelmer/dulwich" c = HttpGitClient(url, config=None, username="user", password="passwd") @@ -999,7 +1000,7 @@ def test_init_username_passwd_set(self): expected_basic_auth = "Basic {}".format(b64_credentials.decode("latin1")) self.assertEqual(basic_auth, expected_basic_auth) - def test_init_username_set_no_password(self): + def test_init_username_set_no_password(self) -> None: url = "https://github.com/jelmer/dulwich" c = HttpGitClient(url, config=None, username="user") @@ -1012,7 +1013,7 @@ def test_init_username_set_no_password(self): expected_basic_auth = f"Basic {b64_credentials.decode('ascii')}" self.assertEqual(basic_auth, expected_basic_auth) - def test_init_no_username_passwd(self): + def test_init_no_username_passwd(self) -> None: url = "https://github.com/jelmer/dulwich" c = HttpGitClient(url, config=None) @@ -1020,7 +1021,7 @@ def test_init_no_username_passwd(self): self.assertIs(None, c._password) self.assertNotIn("authorization", c.pool_manager.headers) - def test_from_parsedurl_username_only(self): + def test_from_parsedurl_username_only(self) -> None: username = "user" url = f"https://{username}@github.com/jelmer/dulwich" @@ -1034,7 +1035,7 @@ def test_from_parsedurl_username_only(self): expected_basic_auth = f"Basic {b64_credentials.decode('ascii')}" self.assertEqual(basic_auth, expected_basic_auth) - def test_from_parsedurl_on_url_with_quoted_credentials(self): + def test_from_parsedurl_on_url_with_quoted_credentials(self) -> None: original_username = "john|the|first" quoted_username = urlquote(original_username) @@ -1053,7 +1054,7 @@ def test_from_parsedurl_on_url_with_quoted_credentials(self): expected_basic_auth = "Basic {}".format(b64_credentials.decode("latin1")) self.assertEqual(basic_auth, expected_basic_auth) - def test_url_redirect_location(self): + def test_url_redirect_location(self) -> None: from urllib3.response import HTTPResponse test_data = { @@ -1157,7 +1158,7 @@ def request( # check also the no redirection case self.assertEqual(processed_url, base_url) - def test_smart_request_content_type_with_directive_check(self): + def test_smart_request_content_type_with_directive_check(self) -> None: from urllib3.response import HTTPResponse # we need to mock urllib3.PoolManager as this test will fail @@ -1189,7 +1190,7 @@ def request( client = HttpGitClient(clone_url, pool_manager=PoolManagerMock(), config=None) self.assertTrue(client._smart_request("git-upload-pack", clone_url, data=None)) - def 
test_urllib3_protocol_error(self): + def test_urllib3_protocol_error(self) -> None: from urllib3.exceptions import ProtocolError from urllib3.response import HTTPResponse @@ -1218,7 +1219,7 @@ def request( status=200, ) - def read(self): + def read(self) -> NoReturn: raise ProtocolError(error_msg) # override HTTPResponse.read to throw urllib3.exceptions.ProtocolError @@ -1236,7 +1237,7 @@ def check_heads(heads, **kwargs): class TCPGitClientTests(TestCase): - def test_get_url(self): + def test_get_url(self) -> None: host = "github.com" path = "/jelmer/dulwich" c = TCPGitClient(host) @@ -1244,7 +1245,7 @@ def test_get_url(self): url = c.get_url(path) self.assertEqual("git://github.com/jelmer/dulwich", url) - def test_get_url_with_port(self): + def test_get_url_with_port(self) -> None: host = "github.com" path = "/jelmer/dulwich" port = 9090 @@ -1255,18 +1256,18 @@ def test_get_url_with_port(self): class DefaultUrllib3ManagerTest(TestCase): - def test_no_config(self): + def test_no_config(self) -> None: manager = default_urllib3_manager(config=None) self.assertEqual(manager.connection_pool_kw["cert_reqs"], "CERT_REQUIRED") - def test_config_no_proxy(self): + def test_config_no_proxy(self) -> None: import urllib3 manager = default_urllib3_manager(config=ConfigDict()) self.assertNotIsInstance(manager, urllib3.ProxyManager) self.assertIsInstance(manager, urllib3.PoolManager) - def test_config_no_proxy_custom_cls(self): + def test_config_no_proxy_custom_cls(self) -> None: import urllib3 class CustomPoolManager(urllib3.PoolManager): @@ -1277,19 +1278,19 @@ class CustomPoolManager(urllib3.PoolManager): ) self.assertIsInstance(manager, CustomPoolManager) - def test_config_ssl(self): + def test_config_ssl(self) -> None: config = ConfigDict() config.set(b"http", b"sslVerify", b"true") manager = default_urllib3_manager(config=config) self.assertEqual(manager.connection_pool_kw["cert_reqs"], "CERT_REQUIRED") - def test_config_no_ssl(self): + def test_config_no_ssl(self) -> None: config = ConfigDict() config.set(b"http", b"sslVerify", b"false") manager = default_urllib3_manager(config=config) self.assertEqual(manager.connection_pool_kw["cert_reqs"], "CERT_NONE") - def test_config_proxy(self): + def test_config_proxy(self) -> None: import urllib3 config = ConfigDict() @@ -1302,7 +1303,7 @@ def test_config_proxy(self): self.assertEqual(manager.proxy.host, "localhost") self.assertEqual(manager.proxy.port, 3128) - def test_environment_proxy(self): + def test_environment_proxy(self) -> None: import urllib3 config = ConfigDict() @@ -1314,7 +1315,7 @@ def test_environment_proxy(self): self.assertEqual(manager.proxy.host, "myproxy") self.assertEqual(manager.proxy.port, 8080) - def test_environment_empty_proxy(self): + def test_environment_empty_proxy(self) -> None: import urllib3 config = ConfigDict() @@ -1323,7 +1324,7 @@ def test_environment_empty_proxy(self): self.assertNotIsInstance(manager, urllib3.ProxyManager) self.assertIsInstance(manager, urllib3.PoolManager) - def test_environment_no_proxy_1(self): + def test_environment_no_proxy_1(self) -> None: import urllib3 config = ConfigDict() @@ -1334,7 +1335,7 @@ def test_environment_no_proxy_1(self): self.assertNotIsInstance(manager, urllib3.ProxyManager) self.assertIsInstance(manager, urllib3.PoolManager) - def test_environment_no_proxy_2(self): + def test_environment_no_proxy_2(self) -> None: import urllib3 config = ConfigDict() @@ -1345,7 +1346,7 @@ def test_environment_no_proxy_2(self): self.assertNotIsInstance(manager, urllib3.ProxyManager) 
self.assertIsInstance(manager, urllib3.PoolManager) - def test_environment_no_proxy_3(self): + def test_environment_no_proxy_3(self) -> None: import urllib3 config = ConfigDict() @@ -1356,7 +1357,7 @@ def test_environment_no_proxy_3(self): self.assertNotIsInstance(manager, urllib3.ProxyManager) self.assertIsInstance(manager, urllib3.PoolManager) - def test_environment_no_proxy_4(self): + def test_environment_no_proxy_4(self) -> None: import urllib3 config = ConfigDict() @@ -1367,7 +1368,7 @@ def test_environment_no_proxy_4(self): self.assertNotIsInstance(manager, urllib3.ProxyManager) self.assertIsInstance(manager, urllib3.PoolManager) - def test_environment_no_proxy_5(self): + def test_environment_no_proxy_5(self) -> None: import urllib3 config = ConfigDict() @@ -1381,7 +1382,7 @@ def test_environment_no_proxy_5(self): self.assertEqual(manager.proxy.host, "myproxy") self.assertEqual(manager.proxy.port, 8080) - def test_environment_no_proxy_6(self): + def test_environment_no_proxy_6(self) -> None: import urllib3 config = ConfigDict() @@ -1395,7 +1396,7 @@ def test_environment_no_proxy_6(self): self.assertEqual(manager.proxy.host, "myproxy") self.assertEqual(manager.proxy.port, 8080) - def test_environment_no_proxy_ipv4_address_1(self): + def test_environment_no_proxy_ipv4_address_1(self) -> None: import urllib3 config = ConfigDict() @@ -1406,7 +1407,7 @@ def test_environment_no_proxy_ipv4_address_1(self): self.assertNotIsInstance(manager, urllib3.ProxyManager) self.assertIsInstance(manager, urllib3.PoolManager) - def test_environment_no_proxy_ipv4_address_2(self): + def test_environment_no_proxy_ipv4_address_2(self) -> None: import urllib3 config = ConfigDict() @@ -1417,7 +1418,7 @@ def test_environment_no_proxy_ipv4_address_2(self): self.assertNotIsInstance(manager, urllib3.ProxyManager) self.assertIsInstance(manager, urllib3.PoolManager) - def test_environment_no_proxy_ipv4_address_3(self): + def test_environment_no_proxy_ipv4_address_3(self) -> None: import urllib3 config = ConfigDict() @@ -1430,7 +1431,7 @@ def test_environment_no_proxy_ipv4_address_3(self): self.assertNotIsInstance(manager, urllib3.ProxyManager) self.assertIsInstance(manager, urllib3.PoolManager) - def test_environment_no_proxy_ipv6_address_1(self): + def test_environment_no_proxy_ipv6_address_1(self) -> None: import urllib3 config = ConfigDict() @@ -1441,7 +1442,7 @@ def test_environment_no_proxy_ipv6_address_1(self): self.assertNotIsInstance(manager, urllib3.ProxyManager) self.assertIsInstance(manager, urllib3.PoolManager) - def test_environment_no_proxy_ipv6_address_2(self): + def test_environment_no_proxy_ipv6_address_2(self) -> None: import urllib3 config = ConfigDict() @@ -1452,7 +1453,7 @@ def test_environment_no_proxy_ipv6_address_2(self): self.assertNotIsInstance(manager, urllib3.ProxyManager) self.assertIsInstance(manager, urllib3.PoolManager) - def test_environment_no_proxy_ipv6_address_3(self): + def test_environment_no_proxy_ipv6_address_3(self) -> None: import urllib3 config = ConfigDict() @@ -1465,7 +1466,7 @@ def test_environment_no_proxy_ipv6_address_3(self): self.assertNotIsInstance(manager, urllib3.ProxyManager) self.assertIsInstance(manager, urllib3.PoolManager) - def test_config_proxy_custom_cls(self): + def test_config_proxy_custom_cls(self) -> None: import urllib3 class CustomProxyManager(urllib3.ProxyManager): @@ -1478,7 +1479,7 @@ class CustomProxyManager(urllib3.ProxyManager): ) self.assertIsInstance(manager, CustomProxyManager) - def test_config_proxy_creds(self): + def 
test_config_proxy_creds(self) -> None: import urllib3 config = ConfigDict() @@ -1489,21 +1490,21 @@ def test_config_proxy_creds(self): manager.proxy_headers, {"proxy-authorization": "Basic amVsbWVyOmV4YW1wbGU="} ) - def test_config_no_verify_ssl(self): + def test_config_no_verify_ssl(self) -> None: manager = default_urllib3_manager(config=None, cert_reqs="CERT_NONE") self.assertEqual(manager.connection_pool_kw["cert_reqs"], "CERT_NONE") class SubprocessSSHVendorTests(TestCase): - def setUp(self): + def setUp(self) -> None: # Monkey Patch client subprocess popen self._orig_popen = dulwich.client.subprocess.Popen dulwich.client.subprocess.Popen = DummyPopen - def tearDown(self): + def tearDown(self) -> None: dulwich.client.subprocess.Popen = self._orig_popen - def test_run_command_dashes(self): + def test_run_command_dashes(self) -> None: vendor = SubprocessSSHVendor() self.assertRaises( StrangeHostname, @@ -1512,7 +1513,7 @@ def test_run_command_dashes(self): "git-clone-url", ) - def test_run_command_password(self): + def test_run_command_password(self) -> None: vendor = SubprocessSSHVendor() self.assertRaises( NotImplementedError, @@ -1522,7 +1523,7 @@ def test_run_command_password(self): password="12345", ) - def test_run_command_password_and_privkey(self): + def test_run_command_password_and_privkey(self) -> None: vendor = SubprocessSSHVendor() self.assertRaises( NotImplementedError, @@ -1533,7 +1534,7 @@ def test_run_command_password_and_privkey(self): key_filename="/tmp/id_rsa", ) - def test_run_command_with_port_username_and_privkey(self): + def test_run_command_with_port_username_and_privkey(self) -> None: expected = [ "ssh", "-x", @@ -1565,7 +1566,7 @@ def test_run_command_with_port_username_and_privkey(self): self.assertListEqual(expected, args[0]) - def test_run_with_ssh_command(self): + def test_run_with_ssh_command(self) -> None: expected = [ "/path/to/ssh", "-o", @@ -1594,15 +1595,15 @@ def test_run_with_ssh_command(self): class PLinkSSHVendorTests(TestCase): - def setUp(self): + def setUp(self) -> None: # Monkey Patch client subprocess popen self._orig_popen = dulwich.client.subprocess.Popen dulwich.client.subprocess.Popen = DummyPopen - def tearDown(self): + def tearDown(self) -> None: dulwich.client.subprocess.Popen = self._orig_popen - def test_run_command_dashes(self): + def test_run_command_dashes(self) -> None: vendor = PLinkSSHVendor() self.assertRaises( StrangeHostname, @@ -1611,7 +1612,7 @@ def test_run_command_dashes(self): "git-clone-url", ) - def test_run_command_password_and_privkey(self): + def test_run_command_password_and_privkey(self) -> None: vendor = PLinkSSHVendor() warnings.simplefilter("always", UserWarning) @@ -1656,7 +1657,7 @@ def test_run_command_password_and_privkey(self): ] self.assertListEqual(expected, args[0]) - def test_run_command_password(self): + def test_run_command_password(self) -> None: if sys.platform == "win32": binary = ["plink.exe", "-ssh"] else: @@ -1689,7 +1690,7 @@ def test_run_command_password(self): self.assertListEqual(expected, args[0]) - def test_run_command_with_port_username_and_privkey(self): + def test_run_command_with_port_username_and_privkey(self) -> None: if sys.platform == "win32": binary = ["plink.exe", "-ssh"] else: @@ -1717,7 +1718,7 @@ def test_run_command_with_port_username_and_privkey(self): self.assertListEqual(expected, args[0]) - def test_run_with_ssh_command(self): + def test_run_with_ssh_command(self) -> None: expected = [ "/path/to/plink", "-ssh", @@ -1737,24 +1738,24 @@ def 
test_run_with_ssh_command(self): class RsyncUrlTests(TestCase): - def test_simple(self): + def test_simple(self) -> None: self.assertEqual(parse_rsync_url("foo:bar/path"), (None, "foo", "bar/path")) self.assertEqual( parse_rsync_url("user@foo:bar/path"), ("user", "foo", "bar/path") ) - def test_path(self): + def test_path(self) -> None: self.assertRaises(ValueError, parse_rsync_url, "/path") class CheckWantsTests(TestCase): - def test_fine(self): + def test_fine(self) -> None: check_wants( [b"2f3dc7a53fb752a6961d3a56683df46d4d3bf262"], {b"refs/heads/blah": b"2f3dc7a53fb752a6961d3a56683df46d4d3bf262"}, ) - def test_missing(self): + def test_missing(self) -> None: self.assertRaises( InvalidWants, check_wants, @@ -1762,7 +1763,7 @@ def test_missing(self): {b"refs/heads/blah": b"3f3dc7a53fb752a6961d3a56683df46d4d3bf262"}, ) - def test_annotated(self): + def test_annotated(self) -> None: self.assertRaises( InvalidWants, check_wants, @@ -1775,7 +1776,7 @@ def test_annotated(self): class FetchPackResultTests(TestCase): - def test_eq(self): + def test_eq(self) -> None: self.assertEqual( FetchPackResult( {b"refs/heads/master": b"2f3dc7a53fb752a6961d3a56683df46d4d3bf262"}, @@ -1792,34 +1793,34 @@ def test_eq(self): class GitCredentialStoreTests(TestCase): @classmethod - def setUpClass(cls): + def setUpClass(cls) -> None: with tempfile.NamedTemporaryFile(delete=False) as f: f.write(b"https://user:pass@example.org\n") cls.fname = f.name @classmethod - def tearDownClass(cls): + def tearDownClass(cls) -> None: os.unlink(cls.fname) - def test_nonmatching_scheme(self): + def test_nonmatching_scheme(self) -> None: self.assertEqual( get_credentials_from_store(b"http", b"example.org", fnames=[self.fname]), None, ) - def test_nonmatching_hostname(self): + def test_nonmatching_hostname(self) -> None: self.assertEqual( get_credentials_from_store(b"https", b"noentry.org", fnames=[self.fname]), None, ) - def test_match_without_username(self): + def test_match_without_username(self) -> None: self.assertEqual( get_credentials_from_store(b"https", b"example.org", fnames=[self.fname]), (b"user", b"pass"), ) - def test_match_with_matching_username(self): + def test_match_with_matching_username(self) -> None: self.assertEqual( get_credentials_from_store( b"https", b"example.org", b"user", fnames=[self.fname] @@ -1827,7 +1828,7 @@ def test_match_with_matching_username(self): (b"user", b"pass"), ) - def test_no_match_with_nonmatching_username(self): + def test_no_match_with_nonmatching_username(self) -> None: self.assertEqual( get_credentials_from_store( b"https", b"example.org", b"otheruser", fnames=[self.fname] @@ -1837,10 +1838,10 @@ def test_no_match_with_nonmatching_username(self): class RemoteErrorFromStderrTests(TestCase): - def test_nothing(self): + def test_nothing(self) -> None: self.assertEqual(_remote_error_from_stderr(None), HangupException()) - def test_error_line(self): + def test_error_line(self) -> None: b = BytesIO( b"""\ This is some random output. @@ -1853,7 +1854,7 @@ def test_error_line(self): GitProtocolError("This is the actual error"), ) - def test_no_error_line(self): + def test_no_error_line(self) -> None: b = BytesIO( b"""\ This is output without an error line. 
@@ -1872,7 +1873,7 @@ def test_no_error_line(self): class TestExtractAgentAndSymrefs(TestCase): - def test_extract_agent_and_symrefs(self): + def test_extract_agent_and_symrefs(self) -> None: (symrefs, agent) = _extract_symrefs_and_agent( [b"agent=git/2.31.1", b"symref=HEAD:refs/heads/master"] ) diff --git a/tests/test_config.py b/tests/test_config.py index 5fb0afd8f..1fc4de872 100644 --- a/tests/test_config.py +++ b/tests/test_config.py @@ -46,13 +46,13 @@ class ConfigFileTests(TestCase): def from_file(self, text): return ConfigFile.from_file(BytesIO(text)) - def test_empty(self): + def test_empty(self) -> None: ConfigFile() - def test_eq(self): + def test_eq(self) -> None: self.assertEqual(ConfigFile(), ConfigFile()) - def test_default_config(self): + def test_default_config(self) -> None: cf = self.from_file( b"""[core] \trepositoryformatversion = 0 @@ -75,127 +75,127 @@ def test_default_config(self): cf, ) - def test_from_file_empty(self): + def test_from_file_empty(self) -> None: cf = self.from_file(b"") self.assertEqual(ConfigFile(), cf) - def test_empty_line_before_section(self): + def test_empty_line_before_section(self) -> None: cf = self.from_file(b"\n[section]\n") self.assertEqual(ConfigFile({(b"section",): {}}), cf) - def test_comment_before_section(self): + def test_comment_before_section(self) -> None: cf = self.from_file(b"# foo\n[section]\n") self.assertEqual(ConfigFile({(b"section",): {}}), cf) - def test_comment_after_section(self): + def test_comment_after_section(self) -> None: cf = self.from_file(b"[section] # foo\n") self.assertEqual(ConfigFile({(b"section",): {}}), cf) - def test_comment_after_variable(self): + def test_comment_after_variable(self) -> None: cf = self.from_file(b"[section]\nbar= foo # a comment\n") self.assertEqual(ConfigFile({(b"section",): {b"bar": b"foo"}}), cf) - def test_comment_character_within_value_string(self): + def test_comment_character_within_value_string(self) -> None: cf = self.from_file(b'[section]\nbar= "foo#bar"\n') self.assertEqual(ConfigFile({(b"section",): {b"bar": b"foo#bar"}}), cf) - def test_comment_character_within_section_string(self): + def test_comment_character_within_section_string(self) -> None: cf = self.from_file(b'[branch "foo#bar"] # a comment\nbar= foo\n') self.assertEqual(ConfigFile({(b"branch", b"foo#bar"): {b"bar": b"foo"}}), cf) - def test_closing_bracket_within_section_string(self): + def test_closing_bracket_within_section_string(self) -> None: cf = self.from_file(b'[branch "foo]bar"] # a comment\nbar= foo\n') self.assertEqual(ConfigFile({(b"branch", b"foo]bar"): {b"bar": b"foo"}}), cf) - def test_from_file_section(self): + def test_from_file_section(self) -> None: cf = self.from_file(b"[core]\nfoo = bar\n") self.assertEqual(b"bar", cf.get((b"core",), b"foo")) self.assertEqual(b"bar", cf.get((b"core", b"foo"), b"foo")) - def test_from_file_multiple(self): + def test_from_file_multiple(self) -> None: cf = self.from_file(b"[core]\nfoo = bar\nfoo = blah\n") self.assertEqual([b"bar", b"blah"], list(cf.get_multivar((b"core",), b"foo"))) self.assertEqual([], list(cf.get_multivar((b"core",), b"blah"))) - def test_from_file_utf8_bom(self): + def test_from_file_utf8_bom(self) -> None: text = "[core]\nfoo = b\u00e4r\n".encode("utf-8-sig") cf = self.from_file(text) self.assertEqual(b"b\xc3\xa4r", cf.get((b"core",), b"foo")) - def test_from_file_section_case_insensitive_lower(self): + def test_from_file_section_case_insensitive_lower(self) -> None: cf = self.from_file(b"[cOre]\nfOo = bar\n") self.assertEqual(b"bar", 
cf.get((b"core",), b"foo")) self.assertEqual(b"bar", cf.get((b"core", b"foo"), b"foo")) - def test_from_file_section_case_insensitive_mixed(self): + def test_from_file_section_case_insensitive_mixed(self) -> None: cf = self.from_file(b"[cOre]\nfOo = bar\n") self.assertEqual(b"bar", cf.get((b"core",), b"fOo")) self.assertEqual(b"bar", cf.get((b"cOre", b"fOo"), b"fOo")) - def test_from_file_with_mixed_quoted(self): + def test_from_file_with_mixed_quoted(self) -> None: cf = self.from_file(b'[core]\nfoo = "bar"la\n') self.assertEqual(b"barla", cf.get((b"core",), b"foo")) - def test_from_file_section_with_open_brackets(self): + def test_from_file_section_with_open_brackets(self) -> None: self.assertRaises(ValueError, self.from_file, b"[core\nfoo = bar\n") - def test_from_file_value_with_open_quoted(self): + def test_from_file_value_with_open_quoted(self) -> None: self.assertRaises(ValueError, self.from_file, b'[core]\nfoo = "bar\n') - def test_from_file_with_quotes(self): + def test_from_file_with_quotes(self) -> None: cf = self.from_file(b"[core]\n" b'foo = " bar"\n') self.assertEqual(b" bar", cf.get((b"core",), b"foo")) - def test_from_file_with_interrupted_line(self): + def test_from_file_with_interrupted_line(self) -> None: cf = self.from_file(b"[core]\n" b"foo = bar\\\n" b" la\n") self.assertEqual(b"barla", cf.get((b"core",), b"foo")) - def test_from_file_with_boolean_setting(self): + def test_from_file_with_boolean_setting(self) -> None: cf = self.from_file(b"[core]\n" b"foo\n") self.assertEqual(b"true", cf.get((b"core",), b"foo")) - def test_from_file_subsection(self): + def test_from_file_subsection(self) -> None: cf = self.from_file(b'[branch "foo"]\nfoo = bar\n') self.assertEqual(b"bar", cf.get((b"branch", b"foo"), b"foo")) - def test_from_file_subsection_invalid(self): + def test_from_file_subsection_invalid(self) -> None: self.assertRaises(ValueError, self.from_file, b'[branch "foo]\nfoo = bar\n') - def test_from_file_subsection_not_quoted(self): + def test_from_file_subsection_not_quoted(self) -> None: cf = self.from_file(b"[branch.foo]\nfoo = bar\n") self.assertEqual(b"bar", cf.get((b"branch", b"foo"), b"foo")) - def test_write_preserve_multivar(self): + def test_write_preserve_multivar(self) -> None: cf = self.from_file(b"[core]\nfoo = bar\nfoo = blah\n") f = BytesIO() cf.write_to_file(f) self.assertEqual(b"[core]\n\tfoo = bar\n\tfoo = blah\n", f.getvalue()) - def test_write_to_file_empty(self): + def test_write_to_file_empty(self) -> None: c = ConfigFile() f = BytesIO() c.write_to_file(f) self.assertEqual(b"", f.getvalue()) - def test_write_to_file_section(self): + def test_write_to_file_section(self) -> None: c = ConfigFile() c.set((b"core",), b"foo", b"bar") f = BytesIO() c.write_to_file(f) self.assertEqual(b"[core]\n\tfoo = bar\n", f.getvalue()) - def test_write_to_file_subsection(self): + def test_write_to_file_subsection(self) -> None: c = ConfigFile() c.set((b"branch", b"blie"), b"foo", b"bar") f = BytesIO() c.write_to_file(f) self.assertEqual(b'[branch "blie"]\n\tfoo = bar\n', f.getvalue()) - def test_same_line(self): + def test_same_line(self) -> None: cf = self.from_file(b"[branch.foo] foo = bar\n") self.assertEqual(b"bar", cf.get((b"branch", b"foo"), b"foo")) - def test_quoted_newlines_windows(self): + def test_quoted_newlines_windows(self) -> None: cf = self.from_file( b"[alias]\r\n" b"c = '!f() { \\\r\n" @@ -209,7 +209,7 @@ def test_quoted_newlines_windows(self): cf.get((b"alias",), b"c"), ) - def test_quoted(self): + def test_quoted(self) -> None: cf = 
self.from_file( b"""[gui] \tfontdiff = -family \\\"Ubuntu Mono\\\" -size 11 -overstrike 0 @@ -226,7 +226,7 @@ def test_quoted(self): cf, ) - def test_quoted_multiline(self): + def test_quoted_multiline(self) -> None: cf = self.from_file( b"""[alias] who = \"!who() {\\ @@ -249,7 +249,7 @@ def test_quoted_multiline(self): cf, ) - def test_set_hash_gets_quoted(self): + def test_set_hash_gets_quoted(self) -> None: c = ConfigFile() c.set(b"xandikos", b"color", b"#665544") f = BytesIO() @@ -258,7 +258,7 @@ def test_set_hash_gets_quoted(self): class ConfigDictTests(TestCase): - def test_get_set(self): + def test_get_set(self) -> None: cd = ConfigDict() self.assertRaises(KeyError, cd.get, b"foo", b"core") cd.set((b"core",), b"foo", b"bla") @@ -266,7 +266,7 @@ def test_get_set(self): cd.set((b"core",), b"foo", b"bloe") self.assertEqual(b"bloe", cd.get((b"core",), b"foo")) - def test_get_boolean(self): + def test_get_boolean(self) -> None: cd = ConfigDict() cd.set((b"core",), b"foo", b"true") self.assertTrue(cd.get_boolean((b"core",), b"foo")) @@ -275,7 +275,7 @@ def test_get_boolean(self): cd.set((b"core",), b"foo", b"invalid") self.assertRaises(ValueError, cd.get_boolean, (b"core",), b"foo") - def test_dict(self): + def test_dict(self) -> None: cd = ConfigDict() cd.set((b"core",), b"foo", b"bla") cd.set((b"core2",), b"foo", b"bloe") @@ -286,20 +286,20 @@ def test_dict(self): cd[b"a"] = b"b" self.assertEqual(cd[b"a"], b"b") - def test_items(self): + def test_items(self) -> None: cd = ConfigDict() cd.set((b"core",), b"foo", b"bla") cd.set((b"core2",), b"foo", b"bloe") self.assertEqual([(b"foo", b"bla")], list(cd.items((b"core",)))) - def test_items_nonexistant(self): + def test_items_nonexistant(self) -> None: cd = ConfigDict() cd.set((b"core2",), b"foo", b"bloe") self.assertEqual([], list(cd.items((b"core",)))) - def test_sections(self): + def test_sections(self) -> None: cd = ConfigDict() cd.set((b"core2",), b"foo", b"bloe") @@ -307,11 +307,11 @@ def test_sections(self): class StackedConfigTests(TestCase): - def test_default_backends(self): + def test_default_backends(self) -> None: StackedConfig.default_backends() @skipIf(sys.platform != "win32", "Windows specific config location.") - def test_windows_config_from_path(self): + def test_windows_config_from_path(self) -> None: from dulwich.config import get_win_system_paths install_dir = os.path.join("C:", "foo", "Git") @@ -327,7 +327,7 @@ def test_windows_config_from_path(self): ) @skipIf(sys.platform != "win32", "Windows specific config location.") - def test_windows_config_from_reg(self): + def test_windows_config_from_reg(self) -> None: import winreg from dulwich.config import get_win_system_paths @@ -350,66 +350,66 @@ def test_windows_config_from_reg(self): class EscapeValueTests(TestCase): - def test_nothing(self): + def test_nothing(self) -> None: self.assertEqual(b"foo", _escape_value(b"foo")) - def test_backslash(self): + def test_backslash(self) -> None: self.assertEqual(b"foo\\\\", _escape_value(b"foo\\")) - def test_newline(self): + def test_newline(self) -> None: self.assertEqual(b"foo\\n", _escape_value(b"foo\n")) class FormatStringTests(TestCase): - def test_quoted(self): + def test_quoted(self) -> None: self.assertEqual(b'" foo"', _format_string(b" foo")) self.assertEqual(b'"\\tfoo"', _format_string(b"\tfoo")) - def test_not_quoted(self): + def test_not_quoted(self) -> None: self.assertEqual(b"foo", _format_string(b"foo")) self.assertEqual(b"foo bar", _format_string(b"foo bar")) class ParseStringTests(TestCase): - def 
test_quoted(self): + def test_quoted(self) -> None: self.assertEqual(b" foo", _parse_string(b'" foo"')) self.assertEqual(b"\tfoo", _parse_string(b'"\\tfoo"')) - def test_not_quoted(self): + def test_not_quoted(self) -> None: self.assertEqual(b"foo", _parse_string(b"foo")) self.assertEqual(b"foo bar", _parse_string(b"foo bar")) - def test_nothing(self): + def test_nothing(self) -> None: self.assertEqual(b"", _parse_string(b"")) - def test_tab(self): + def test_tab(self) -> None: self.assertEqual(b"\tbar\t", _parse_string(b"\\tbar\\t")) - def test_newline(self): + def test_newline(self) -> None: self.assertEqual(b"\nbar\t", _parse_string(b"\\nbar\\t\t")) - def test_quote(self): + def test_quote(self) -> None: self.assertEqual(b'"foo"', _parse_string(b'\\"foo\\"')) class CheckVariableNameTests(TestCase): - def test_invalid(self): + def test_invalid(self) -> None: self.assertFalse(_check_variable_name(b"foo ")) self.assertFalse(_check_variable_name(b"bar,bar")) self.assertFalse(_check_variable_name(b"bar.bar")) - def test_valid(self): + def test_valid(self) -> None: self.assertTrue(_check_variable_name(b"FOO")) self.assertTrue(_check_variable_name(b"foo")) self.assertTrue(_check_variable_name(b"foo-bar")) class CheckSectionNameTests(TestCase): - def test_invalid(self): + def test_invalid(self) -> None: self.assertFalse(_check_section_name(b"foo ")) self.assertFalse(_check_section_name(b"bar,bar")) - def test_valid(self): + def test_valid(self) -> None: self.assertTrue(_check_section_name(b"FOO")) self.assertTrue(_check_section_name(b"foo")) self.assertTrue(_check_section_name(b"foo-bar")) @@ -417,7 +417,7 @@ def test_valid(self): class SubmodulesTests(TestCase): - def testSubmodules(self): + def testSubmodules(self) -> None: cf = ConfigFile.from_file( BytesIO( b"""\ @@ -439,7 +439,7 @@ def testSubmodules(self): got, ) - def testMalformedSubmodules(self): + def testMalformedSubmodules(self) -> None: cf = ConfigFile.from_file( BytesIO( b"""\ @@ -466,20 +466,20 @@ def testMalformedSubmodules(self): class ApplyInsteadOfTests(TestCase): - def test_none(self): + def test_none(self) -> None: config = ConfigDict() self.assertEqual( "https://example.com/", apply_instead_of(config, "https://example.com/") ) - def test_apply(self): + def test_apply(self) -> None: config = ConfigDict() config.set(("url", "https://samba.org/"), "insteadOf", "https://example.com/") self.assertEqual( "https://samba.org/", apply_instead_of(config, "https://example.com/") ) - def test_apply_multiple(self): + def test_apply_multiple(self) -> None: config = ConfigDict() config.set(("url", "https://samba.org/"), "insteadOf", "https://blah.com/") config.set(("url", "https://samba.org/"), "insteadOf", "https://example.com/") diff --git a/tests/test_credentials.py b/tests/test_credentials.py index a677d7d79..5afa0b014 100644 --- a/tests/test_credentials.py +++ b/tests/test_credentials.py @@ -32,7 +32,7 @@ class TestCredentialHelpersUtils(TestCase): - def test_match_urls(self): + def test_match_urls(self) -> None: url = urlparse("https://github.com/jelmer/dulwich/") url_1 = urlparse("https://github.com/jelmer/dulwich") url_2 = urlparse("https://github.com/jelmer") @@ -44,7 +44,7 @@ def test_match_urls(self): non_matching = urlparse("https://git.sr.ht/") self.assertFalse(match_urls(url, non_matching)) - def test_match_partial_url(self): + def test_match_partial_url(self) -> None: url = urlparse("https://github.com/jelmer/dulwich/") self.assertTrue(match_partial_url(url, "github.com")) self.assertFalse(match_partial_url(url, 
"github.com/jelmer/")) @@ -52,7 +52,7 @@ def test_match_partial_url(self): self.assertFalse(match_partial_url(url, "github.com/jel")) self.assertFalse(match_partial_url(url, "github.com/jel/")) - def test_urlmatch_credential_sections(self): + def test_urlmatch_credential_sections(self) -> None: config = ConfigDict() config.set((b"credential", "https://github.com"), b"helper", "foo") config.set((b"credential", "git.sr.ht"), b"helper", "foo") diff --git a/tests/test_diff_tree.py b/tests/test_diff_tree.py index f7f4a9cc5..14a8e76ff 100644 --- a/tests/test_diff_tree.py +++ b/tests/test_diff_tree.py @@ -49,7 +49,7 @@ class DiffTestCase(TestCase): - def setUp(self): + def setUp(self) -> None: super().setUp() self.store = MemoryObjectStore() self.empty_tree = self.commit_tree([]) @@ -72,16 +72,16 @@ def commit_tree(self, entries): class TreeChangesTest(DiffTestCase): - def setUp(self): + def setUp(self) -> None: super().setUp() self.detector = RenameDetector(self.store) - def assertMergeFails(self, merge_entries, name, mode, sha): + def assertMergeFails(self, merge_entries, name, mode, sha) -> None: t = Tree() t[name] = (mode, sha) self.assertRaises((TypeError, ValueError), merge_entries, "", t, t) - def _do_test_merge_entries(self, merge_entries): + def _do_test_merge_entries(self, merge_entries) -> None: blob_a1 = make_object(Blob, data=b"a1") blob_a2 = make_object(Blob, data=b"a2") blob_b1 = make_object(Blob, data=b"b1") @@ -140,7 +140,7 @@ def _do_test_merge_entries(self, merge_entries): _do_test_merge_entries, _merge_entries ) - def _do_test_is_tree(self, is_tree): + def _do_test_is_tree(self, is_tree) -> None: self.assertFalse(is_tree(TreeEntry(None, None, None))) self.assertFalse(is_tree(TreeEntry(b"a", 0o100644, b"a" * 40))) self.assertFalse(is_tree(TreeEntry(b"a", 0o100755, b"a" * 40))) @@ -152,16 +152,16 @@ def _do_test_is_tree(self, is_tree): test_is_tree = functest_builder(_do_test_is_tree, _is_tree_py) test_is_tree_extension = ext_functest_builder(_do_test_is_tree, _is_tree) - def assertChangesEqual(self, expected, tree1, tree2, **kwargs): + def assertChangesEqual(self, expected, tree1, tree2, **kwargs) -> None: actual = list(tree_changes(self.store, tree1.id, tree2.id, **kwargs)) self.assertEqual(expected, actual) # For brevity, the following tests use tuples instead of TreeEntry objects. 
- def test_tree_changes_empty(self): + def test_tree_changes_empty(self) -> None: self.assertChangesEqual([], self.empty_tree, self.empty_tree) - def test_tree_changes_no_changes(self): + def test_tree_changes_no_changes(self) -> None: blob = make_object(Blob, data=b"blob") tree = self.commit_tree([(b"a", blob), (b"b/c", blob)]) self.assertChangesEqual([], self.empty_tree, self.empty_tree) @@ -180,7 +180,7 @@ def test_tree_changes_no_changes(self): want_unchanged=True, ) - def test_tree_changes_add_delete(self): + def test_tree_changes_add_delete(self) -> None: blob_a = make_object(Blob, data=b"a") blob_b = make_object(Blob, data=b"b") tree = self.commit_tree([(b"a", blob_a, 0o100644), (b"x/b", blob_b, 0o100755)]) @@ -201,7 +201,7 @@ def test_tree_changes_add_delete(self): self.empty_tree, ) - def test_tree_changes_modify_contents(self): + def test_tree_changes_modify_contents(self) -> None: blob_a1 = make_object(Blob, data=b"a1") blob_a2 = make_object(Blob, data=b"a2") tree1 = self.commit_tree([(b"a", blob_a1)]) @@ -212,7 +212,7 @@ def test_tree_changes_modify_contents(self): tree2, ) - def test_tree_changes_modify_mode(self): + def test_tree_changes_modify_mode(self) -> None: blob_a = make_object(Blob, data=b"a") tree1 = self.commit_tree([(b"a", blob_a, 0o100644)]) tree2 = self.commit_tree([(b"a", blob_a, 0o100755)]) @@ -228,7 +228,7 @@ def test_tree_changes_modify_mode(self): tree2, ) - def test_tree_changes_change_type(self): + def test_tree_changes_change_type(self) -> None: blob_a1 = make_object(Blob, data=b"a") blob_a2 = make_object(Blob, data=b"/foo/bar") tree1 = self.commit_tree([(b"a", blob_a1, 0o100644)]) @@ -242,7 +242,7 @@ def test_tree_changes_change_type(self): tree2, ) - def test_tree_changes_change_type_same(self): + def test_tree_changes_change_type_same(self) -> None: blob_a1 = make_object(Blob, data=b"a") blob_a2 = make_object(Blob, data=b"/foo/bar") tree1 = self.commit_tree([(b"a", blob_a1, 0o100644)]) @@ -260,7 +260,7 @@ def test_tree_changes_change_type_same(self): change_type_same=True, ) - def test_tree_changes_to_tree(self): + def test_tree_changes_to_tree(self) -> None: blob_a = make_object(Blob, data=b"a") blob_x = make_object(Blob, data=b"x") tree1 = self.commit_tree([(b"a", blob_a)]) @@ -274,7 +274,7 @@ def test_tree_changes_to_tree(self): tree2, ) - def test_tree_changes_complex(self): + def test_tree_changes_complex(self) -> None: blob_a_1 = make_object(Blob, data=b"a1_1") blob_bx1_1 = make_object(Blob, data=b"bx1_1") blob_bx2_1 = make_object(Blob, data=b"bx2_1") @@ -320,7 +320,7 @@ def test_tree_changes_complex(self): tree2, ) - def test_tree_changes_name_order(self): + def test_tree_changes_name_order(self) -> None: blob = make_object(Blob, data=b"a") tree1 = self.commit_tree([(b"a", blob), (b"a.", blob), (b"a..", blob)]) # Tree order is the reverse of this, so if we used tree order, 'a..' 
@@ -338,7 +338,7 @@ def test_tree_changes_name_order(self): tree2, ) - def test_tree_changes_prune(self): + def test_tree_changes_prune(self) -> None: blob_a1 = make_object(Blob, data=b"a1") blob_a2 = make_object(Blob, data=b"a2") blob_x = make_object(Blob, data=b"x") @@ -356,7 +356,7 @@ def test_tree_changes_prune(self): tree2, ) - def test_tree_changes_rename_detector(self): + def test_tree_changes_rename_detector(self) -> None: blob_a1 = make_object(Blob, data=b"a\nb\nc\nd\n") blob_a2 = make_object(Blob, data=b"a\nb\nc\ne\n") blob_b = make_object(Blob, data=b"b") @@ -407,7 +407,7 @@ def test_tree_changes_rename_detector(self): want_unchanged=True, ) - def assertChangesForMergeEqual(self, expected, parent_trees, merge_tree, **kwargs): + def assertChangesForMergeEqual(self, expected, parent_trees, merge_tree, **kwargs) -> None: parent_tree_ids = [t.id for t in parent_trees] actual = list( tree_changes_for_merge(self.store, parent_tree_ids, merge_tree.id, **kwargs) @@ -421,14 +421,14 @@ def assertChangesForMergeEqual(self, expected, parent_trees, merge_tree, **kwarg ) self.assertEqual(expected, actual) - def test_tree_changes_for_merge_add_no_conflict(self): + def test_tree_changes_for_merge_add_no_conflict(self) -> None: blob = make_object(Blob, data=b"blob") parent1 = self.commit_tree([]) parent2 = merge = self.commit_tree([(b"a", blob)]) self.assertChangesForMergeEqual([], [parent1, parent2], merge) self.assertChangesForMergeEqual([], [parent2, parent2], merge) - def test_tree_changes_for_merge_add_modify_conflict(self): + def test_tree_changes_for_merge_add_modify_conflict(self) -> None: blob1 = make_object(Blob, data=b"1") blob2 = make_object(Blob, data=b"2") parent1 = self.commit_tree([]) @@ -445,7 +445,7 @@ def test_tree_changes_for_merge_add_modify_conflict(self): merge, ) - def test_tree_changes_for_merge_modify_modify_conflict(self): + def test_tree_changes_for_merge_modify_modify_conflict(self) -> None: blob1 = make_object(Blob, data=b"1") blob2 = make_object(Blob, data=b"2") blob3 = make_object(Blob, data=b"3") @@ -463,14 +463,14 @@ def test_tree_changes_for_merge_modify_modify_conflict(self): merge, ) - def test_tree_changes_for_merge_modify_no_conflict(self): + def test_tree_changes_for_merge_modify_no_conflict(self) -> None: blob1 = make_object(Blob, data=b"1") blob2 = make_object(Blob, data=b"2") parent1 = self.commit_tree([(b"a", blob1)]) parent2 = merge = self.commit_tree([(b"a", blob2)]) self.assertChangesForMergeEqual([], [parent1, parent2], merge) - def test_tree_changes_for_merge_delete_delete_conflict(self): + def test_tree_changes_for_merge_delete_delete_conflict(self) -> None: blob1 = make_object(Blob, data=b"1") blob2 = make_object(Blob, data=b"2") parent1 = self.commit_tree([(b"a", blob1)]) @@ -487,14 +487,14 @@ def test_tree_changes_for_merge_delete_delete_conflict(self): merge, ) - def test_tree_changes_for_merge_delete_no_conflict(self): + def test_tree_changes_for_merge_delete_no_conflict(self) -> None: blob = make_object(Blob, data=b"blob") has = self.commit_tree([(b"a", blob)]) doesnt_have = self.commit_tree([]) self.assertChangesForMergeEqual([], [has, has], doesnt_have) self.assertChangesForMergeEqual([], [has, doesnt_have], doesnt_have) - def test_tree_changes_for_merge_octopus_no_conflict(self): + def test_tree_changes_for_merge_octopus_no_conflict(self) -> None: r = list(range(5)) blobs = [make_object(Blob, data=bytes(i)) for i in r] parents = [self.commit_tree([(b"a", blobs[i])]) for i in r] @@ -502,7 +502,7 @@ def 
test_tree_changes_for_merge_octopus_no_conflict(self): # Take the SHA from each of the parents. self.assertChangesForMergeEqual([], parents, parents[i]) - def test_tree_changes_for_merge_octopus_modify_conflict(self): + def test_tree_changes_for_merge_octopus_modify_conflict(self) -> None: # Because the octopus merge strategy is limited, I doubt it's possible # to create this with the git command line. But the output is well- # defined, so test it anyway. @@ -523,7 +523,7 @@ def test_tree_changes_for_merge_octopus_modify_conflict(self): ] self.assertChangesForMergeEqual(expected, parents, merge) - def test_tree_changes_for_merge_octopus_delete(self): + def test_tree_changes_for_merge_octopus_delete(self) -> None: blob1 = make_object(Blob, data=b"1") blob2 = make_object(Blob, data=b"3") parent1 = self.commit_tree([(b"a", blob1)]) @@ -544,7 +544,7 @@ def test_tree_changes_for_merge_octopus_delete(self): merge, ) - def test_tree_changes_for_merge_add_add_same_conflict(self): + def test_tree_changes_for_merge_add_add_same_conflict(self) -> None: blob = make_object(Blob, data=b"a\nb\nc\nd\n") parent1 = self.commit_tree([(b"a", blob)]) parent2 = self.commit_tree([]) @@ -552,7 +552,7 @@ def test_tree_changes_for_merge_add_add_same_conflict(self): add = TreeChange.add((b"b", F, blob.id)) self.assertChangesForMergeEqual([[add, add]], [parent1, parent2], merge) - def test_tree_changes_for_merge_add_exact_rename_conflict(self): + def test_tree_changes_for_merge_add_exact_rename_conflict(self) -> None: blob = make_object(Blob, data=b"a\nb\nc\nd\n") parent1 = self.commit_tree([(b"a", blob)]) parent2 = self.commit_tree([]) @@ -569,7 +569,7 @@ def test_tree_changes_for_merge_add_exact_rename_conflict(self): rename_detector=self.detector, ) - def test_tree_changes_for_merge_add_content_rename_conflict(self): + def test_tree_changes_for_merge_add_content_rename_conflict(self) -> None: blob1 = make_object(Blob, data=b"a\nb\nc\nd\n") blob2 = make_object(Blob, data=b"a\nb\nc\ne\n") parent1 = self.commit_tree([(b"a", blob1)]) @@ -587,7 +587,7 @@ def test_tree_changes_for_merge_add_content_rename_conflict(self): rename_detector=self.detector, ) - def test_tree_changes_for_merge_modify_rename_conflict(self): + def test_tree_changes_for_merge_modify_rename_conflict(self) -> None: blob1 = make_object(Blob, data=b"a\nb\nc\nd\n") blob2 = make_object(Blob, data=b"a\nb\nc\ne\n") parent1 = self.commit_tree([(b"a", blob1)]) @@ -607,7 +607,7 @@ def test_tree_changes_for_merge_modify_rename_conflict(self): class RenameDetectionTest(DiffTestCase): - def _do_test_count_blocks(self, count_blocks): + def _do_test_count_blocks(self, count_blocks) -> None: blob = make_object(Blob, data=b"a\nb\na\n") self.assertBlockCountEqual({b"a\n": 4, b"b\n": 2}, count_blocks(blob)) @@ -616,7 +616,7 @@ def _do_test_count_blocks(self, count_blocks): _do_test_count_blocks, _count_blocks ) - def _do_test_count_blocks_no_newline(self, count_blocks): + def _do_test_count_blocks_no_newline(self, count_blocks) -> None: blob = make_object(Blob, data=b"a\na") self.assertBlockCountEqual({b"a\n": 2, b"a": 1}, _count_blocks(blob)) @@ -627,13 +627,13 @@ def _do_test_count_blocks_no_newline(self, count_blocks): _do_test_count_blocks_no_newline, _count_blocks ) - def assertBlockCountEqual(self, expected, got): + def assertBlockCountEqual(self, expected, got) -> None: self.assertEqual( {(hash(block) & 0xFFFFFFFF): count for (block, count) in expected.items()}, {(block & 0xFFFFFFFF): count for (block, count) in got.items()}, ) - def 
_do_test_count_blocks_chunks(self, count_blocks): + def _do_test_count_blocks_chunks(self, count_blocks) -> None: blob = ShaFile.from_raw_chunks(Blob.type_num, [b"a\nb", b"\na\n"]) self.assertBlockCountEqual({b"a\n": 4, b"b\n": 2}, _count_blocks(blob)) @@ -644,7 +644,7 @@ def _do_test_count_blocks_chunks(self, count_blocks): _do_test_count_blocks_chunks, _count_blocks ) - def _do_test_count_blocks_long_lines(self, count_blocks): + def _do_test_count_blocks_long_lines(self, count_blocks) -> None: a = b"a" * 64 data = a + b"xxx\ny\n" + a + b"zzz\n" blob = make_object(Blob, data=data) @@ -660,11 +660,11 @@ def _do_test_count_blocks_long_lines(self, count_blocks): _do_test_count_blocks_long_lines, _count_blocks ) - def assertSimilar(self, expected_score, blob1, blob2): + def assertSimilar(self, expected_score, blob1, blob2) -> None: self.assertEqual(expected_score, _similarity_score(blob1, blob2)) self.assertEqual(expected_score, _similarity_score(blob2, blob1)) - def test_similarity_score(self): + def test_similarity_score(self) -> None: blob0 = make_object(Blob, data=b"") blob1 = make_object(Blob, data=b"ab\ncd\ncd\n") blob2 = make_object(Blob, data=b"ab\n") @@ -679,7 +679,7 @@ def test_similarity_score(self): self.assertSimilar(0, blob2, blob3) self.assertSimilar(50, blob3, blob4) - def test_similarity_score_cache(self): + def test_similarity_score_cache(self) -> None: blob1 = make_object(Blob, data=b"ab\ncd\n") blob2 = make_object(Blob, data=b"ab\n") @@ -687,7 +687,7 @@ def test_similarity_score_cache(self): self.assertEqual(50, _similarity_score(blob1, blob2, block_cache=block_cache)) self.assertEqual({blob1.id, blob2.id}, set(block_cache)) - def fail_chunks(): + def fail_chunks() -> None: self.fail("Unexpected call to as_raw_chunks()") blob1.as_raw_chunks = blob2.as_raw_chunks = fail_chunks @@ -695,7 +695,7 @@ def fail_chunks(): blob2.raw_length = lambda: 3 self.assertEqual(50, _similarity_score(blob1, blob2, block_cache=block_cache)) - def test_tree_entry_sort(self): + def test_tree_entry_sort(self) -> None: sha = "abcd" * 10 expected_entries = [ TreeChange.add(TreeEntry(b"aaa", F, sha)), @@ -726,7 +726,7 @@ def detect_renames(self, tree1, tree2, want_unchanged=False, **kwargs): tree1.id, tree2.id, want_unchanged=want_unchanged ) - def test_no_renames(self): + def test_no_renames(self) -> None: blob1 = make_object(Blob, data=b"a\nb\nc\nd\n") blob2 = make_object(Blob, data=b"a\nb\ne\nf\n") blob3 = make_object(Blob, data=b"a\nb\ng\nh\n") @@ -737,7 +737,7 @@ def test_no_renames(self): self.detect_renames(tree1, tree2), ) - def test_exact_rename_one_to_one(self): + def test_exact_rename_one_to_one(self) -> None: blob1 = make_object(Blob, data=b"1") blob2 = make_object(Blob, data=b"2") tree1 = self.commit_tree([(b"a", blob1), (b"b", blob2)]) @@ -750,7 +750,7 @@ def test_exact_rename_one_to_one(self): self.detect_renames(tree1, tree2), ) - def test_exact_rename_split_different_type(self): + def test_exact_rename_split_different_type(self) -> None: blob = make_object(Blob, data=b"/foo") tree1 = self.commit_tree([(b"a", blob, 0o100644)]) tree2 = self.commit_tree([(b"a", blob, 0o120000)]) @@ -762,7 +762,7 @@ def test_exact_rename_split_different_type(self): self.detect_renames(tree1, tree2), ) - def test_exact_rename_and_different_type(self): + def test_exact_rename_and_different_type(self) -> None: blob1 = make_object(Blob, data=b"1") blob2 = make_object(Blob, data=b"2") tree1 = self.commit_tree([(b"a", blob1)]) @@ -775,7 +775,7 @@ def test_exact_rename_and_different_type(self): 
self.detect_renames(tree1, tree2), ) - def test_exact_rename_one_to_many(self): + def test_exact_rename_one_to_many(self) -> None: blob = make_object(Blob, data=b"1") tree1 = self.commit_tree([(b"a", blob)]) tree2 = self.commit_tree([(b"b", blob), (b"c", blob)]) @@ -787,7 +787,7 @@ def test_exact_rename_one_to_many(self): self.detect_renames(tree1, tree2), ) - def test_exact_rename_many_to_one(self): + def test_exact_rename_many_to_one(self) -> None: blob = make_object(Blob, data=b"1") tree1 = self.commit_tree([(b"a", blob), (b"b", blob)]) tree2 = self.commit_tree([(b"c", blob)]) @@ -799,7 +799,7 @@ def test_exact_rename_many_to_one(self): self.detect_renames(tree1, tree2), ) - def test_exact_rename_many_to_many(self): + def test_exact_rename_many_to_many(self) -> None: blob = make_object(Blob, data=b"1") tree1 = self.commit_tree([(b"a", blob), (b"b", blob)]) tree2 = self.commit_tree([(b"c", blob), (b"d", blob), (b"e", blob)]) @@ -812,7 +812,7 @@ def test_exact_rename_many_to_many(self): self.detect_renames(tree1, tree2), ) - def test_exact_copy_modify(self): + def test_exact_copy_modify(self) -> None: blob1 = make_object(Blob, data=b"a\nb\nc\nd\n") blob2 = make_object(Blob, data=b"a\nb\nc\ne\n") tree1 = self.commit_tree([(b"a", blob1)]) @@ -825,7 +825,7 @@ def test_exact_copy_modify(self): self.detect_renames(tree1, tree2), ) - def test_exact_copy_change_mode(self): + def test_exact_copy_change_mode(self) -> None: blob = make_object(Blob, data=b"a\nb\nc\nd\n") tree1 = self.commit_tree([(b"a", blob)]) tree2 = self.commit_tree([(b"a", blob, 0o100755), (b"b", blob)]) @@ -841,7 +841,7 @@ def test_exact_copy_change_mode(self): self.detect_renames(tree1, tree2), ) - def test_rename_threshold(self): + def test_rename_threshold(self) -> None: blob1 = make_object(Blob, data=b"a\nb\nc\n") blob2 = make_object(Blob, data=b"a\nb\nd\n") tree1 = self.commit_tree([(b"a", blob1)]) @@ -858,7 +858,7 @@ def test_rename_threshold(self): self.detect_renames(tree1, tree2, rename_threshold=75), ) - def test_content_rename_max_files(self): + def test_content_rename_max_files(self) -> None: blob1 = make_object(Blob, data=b"a\nb\nc\nd") blob4 = make_object(Blob, data=b"a\nb\nc\ne\n") blob2 = make_object(Blob, data=b"e\nf\ng\nh\n") @@ -882,7 +882,7 @@ def test_content_rename_max_files(self): self.detect_renames(tree1, tree2, max_files=1), ) - def test_content_rename_one_to_one(self): + def test_content_rename_one_to_one(self) -> None: b11 = make_object(Blob, data=b"a\nb\nc\nd\n") b12 = make_object(Blob, data=b"a\nb\nc\ne\n") b21 = make_object(Blob, data=b"e\nf\ng\n\nh") @@ -897,7 +897,7 @@ def test_content_rename_one_to_one(self): self.detect_renames(tree1, tree2), ) - def test_content_rename_one_to_one_ordering(self): + def test_content_rename_one_to_one_ordering(self) -> None: blob1 = make_object(Blob, data=b"a\nb\nc\nd\ne\nf\n") blob2 = make_object(Blob, data=b"a\nb\nc\nd\ng\nh\n") # 6/10 match to blob1, 8/10 match to blob2 @@ -922,7 +922,7 @@ def test_content_rename_one_to_one_ordering(self): self.detect_renames(tree3, tree4), ) - def test_content_rename_one_to_many(self): + def test_content_rename_one_to_many(self) -> None: blob1 = make_object(Blob, data=b"aa\nb\nc\nd\ne\n") blob2 = make_object(Blob, data=b"ab\nb\nc\nd\ne\n") # 8/11 match blob3 = make_object(Blob, data=b"aa\nb\nc\nd\nf\n") # 9/11 match @@ -936,7 +936,7 @@ def test_content_rename_one_to_many(self): self.detect_renames(tree1, tree2), ) - def test_content_rename_many_to_one(self): + def test_content_rename_many_to_one(self) -> None: blob1 = 
make_object(Blob, data=b"a\nb\nc\nd\n") blob2 = make_object(Blob, data=b"a\nb\nc\ne\n") blob3 = make_object(Blob, data=b"a\nb\nc\nf\n") @@ -950,7 +950,7 @@ def test_content_rename_many_to_one(self): self.detect_renames(tree1, tree2), ) - def test_content_rename_many_to_many(self): + def test_content_rename_many_to_many(self) -> None: blob1 = make_object(Blob, data=b"a\nb\nc\nd\n") blob2 = make_object(Blob, data=b"a\nb\nc\ne\n") blob3 = make_object(Blob, data=b"a\nb\nc\nf\n") @@ -968,7 +968,7 @@ def test_content_rename_many_to_many(self): self.detect_renames(tree1, tree2), ) - def test_content_rename_with_more_deletions(self): + def test_content_rename_with_more_deletions(self) -> None: blob1 = make_object(Blob, data=b"") tree1 = self.commit_tree( [(b"a", blob1), (b"b", blob1), (b"c", blob1), (b"d", blob1)] @@ -985,7 +985,7 @@ def test_content_rename_with_more_deletions(self): self.detect_renames(tree1, tree2), ) - def test_content_rename_gitlink(self): + def test_content_rename_gitlink(self) -> None: blob1 = make_object(Blob, data=b"blob1") blob2 = make_object(Blob, data=b"blob2") link1 = b"1" * 40 @@ -1002,7 +1002,7 @@ def test_content_rename_gitlink(self): self.detect_renames(tree1, tree2), ) - def test_exact_rename_swap(self): + def test_exact_rename_swap(self) -> None: blob1 = make_object(Blob, data=b"1") blob2 = make_object(Blob, data=b"2") tree1 = self.commit_tree([(b"a", blob1), (b"b", blob2)]) @@ -1022,7 +1022,7 @@ def test_exact_rename_swap(self): self.detect_renames(tree1, tree2, rewrite_threshold=50), ) - def test_content_rename_swap(self): + def test_content_rename_swap(self) -> None: blob1 = make_object(Blob, data=b"a\nb\nc\nd\n") blob2 = make_object(Blob, data=b"e\nf\ng\nh\n") blob3 = make_object(Blob, data=b"a\nb\nc\ne\n") @@ -1037,7 +1037,7 @@ def test_content_rename_swap(self): self.detect_renames(tree1, tree2, rewrite_threshold=60), ) - def test_rewrite_threshold(self): + def test_rewrite_threshold(self) -> None: blob1 = make_object(Blob, data=b"a\nb\nc\nd\n") blob2 = make_object(Blob, data=b"a\nb\nc\ne\n") blob3 = make_object(Blob, data=b"a\nb\nf\ng\n") @@ -1061,7 +1061,7 @@ def test_rewrite_threshold(self): self.detect_renames(tree1, tree2, rewrite_threshold=80), ) - def test_find_copies_harder_exact(self): + def test_find_copies_harder_exact(self) -> None: blob = make_object(Blob, data=b"blob") tree1 = self.commit_tree([(b"a", blob)]) tree2 = self.commit_tree([(b"a", blob), (b"b", blob)]) @@ -1074,7 +1074,7 @@ def test_find_copies_harder_exact(self): self.detect_renames(tree1, tree2, find_copies_harder=True), ) - def test_find_copies_harder_content(self): + def test_find_copies_harder_content(self) -> None: blob1 = make_object(Blob, data=b"a\nb\nc\nd\n") blob2 = make_object(Blob, data=b"a\nb\nc\ne\n") tree1 = self.commit_tree([(b"a", blob1)]) @@ -1088,7 +1088,7 @@ def test_find_copies_harder_content(self): self.detect_renames(tree1, tree2, find_copies_harder=True), ) - def test_find_copies_harder_with_rewrites(self): + def test_find_copies_harder_with_rewrites(self) -> None: blob_a1 = make_object(Blob, data=b"a\nb\nc\nd\n") blob_a2 = make_object(Blob, data=b"f\ng\nh\ni\n") blob_b2 = make_object(Blob, data=b"a\nb\nc\ne\n") @@ -1111,7 +1111,7 @@ def test_find_copies_harder_with_rewrites(self): ), ) - def test_reuse_detector(self): + def test_reuse_detector(self) -> None: blob = make_object(Blob, data=b"blob") tree1 = self.commit_tree([(b"a", blob)]) tree2 = self.commit_tree([(b"b", blob)]) @@ -1120,7 +1120,7 @@ def test_reuse_detector(self): self.assertEqual(changes, 
detector.changes_with_renames(tree1.id, tree2.id)) self.assertEqual(changes, detector.changes_with_renames(tree1.id, tree2.id)) - def test_want_unchanged(self): + def test_want_unchanged(self) -> None: blob_a1 = make_object(Blob, data=b"a\nb\nc\nd\n") blob_b = make_object(Blob, data=b"b") blob_c2 = make_object(Blob, data=b"a\nb\nc\ne\n") diff --git a/tests/test_fastexport.py b/tests/test_fastexport.py index 86e3a0565..5e1a40941 100644 --- a/tests/test_fastexport.py +++ b/tests/test_fastexport.py @@ -32,7 +32,7 @@ class GitFastExporterTests(TestCase): """Tests for the GitFastExporter tests.""" - def setUp(self): + def setUp(self) -> None: super().setUp() self.store = MemoryObjectStore() self.stream = BytesIO() @@ -42,13 +42,13 @@ def setUp(self): raise SkipTest("python-fastimport not available") from exc self.fastexporter = GitFastExporter(self.stream, self.store) - def test_emit_blob(self): + def test_emit_blob(self) -> None: b = Blob() b.data = b"fooBAR" self.fastexporter.emit_blob(b) self.assertEqual(b"blob\nmark :1\ndata 6\nfooBAR\n", self.stream.getvalue()) - def test_emit_commit(self): + def test_emit_commit(self) -> None: b = Blob() b.data = b"FOO" t = Tree() @@ -81,7 +81,7 @@ def test_emit_commit(self): class GitImportProcessorTests(TestCase): """Tests for the GitImportProcessor tests.""" - def setUp(self): + def setUp(self) -> None: super().setUp() self.repo = MemoryRepo() try: @@ -90,7 +90,7 @@ def setUp(self): raise SkipTest("python-fastimport not available") from exc self.processor = GitImportProcessor(self.repo) - def test_reset_handler(self): + def test_reset_handler(self) -> None: from fastimport import commands [c1] = build_commit_graph(self.repo.object_store, [[1]]) @@ -99,7 +99,7 @@ def test_reset_handler(self): self.assertEqual(c1.id, self.repo.get_refs()[b"refs/heads/foo"]) self.assertEqual(c1.id, self.processor.last_commit) - def test_reset_handler_marker(self): + def test_reset_handler_marker(self) -> None: from fastimport import commands [c1, c2] = build_commit_graph(self.repo.object_store, [[1], [2]]) @@ -108,7 +108,7 @@ def test_reset_handler_marker(self): self.processor.reset_handler(cmd) self.assertEqual(c1.id, self.repo.get_refs()[b"refs/heads/foo"]) - def test_reset_handler_default(self): + def test_reset_handler_default(self) -> None: from fastimport import commands [c1, c2] = build_commit_graph(self.repo.object_store, [[1], [2]]) @@ -116,7 +116,7 @@ def test_reset_handler_default(self): self.processor.reset_handler(cmd) self.assertEqual(ZERO_SHA, self.repo.get_refs()[b"refs/heads/foo"]) - def test_commit_handler(self): + def test_commit_handler(self) -> None: from fastimport import commands cmd = commands.CommitCommand( @@ -141,7 +141,7 @@ def test_commit_handler(self): self.assertEqual(3600, commit.author_timezone) self.assertEqual(commit, self.repo[b"refs/heads/foo"]) - def test_commit_handler_markers(self): + def test_commit_handler_markers(self) -> None: from fastimport import commands [c1, c2, c3] = build_commit_graph(self.repo.object_store, [[1], [2], [3]]) @@ -164,7 +164,7 @@ def test_commit_handler_markers(self): self.assertEqual(c2.id, commit.parents[1]) self.assertEqual(c3.id, commit.parents[2]) - def test_import_stream(self): + def test_import_stream(self) -> None: markers = self.processor.import_stream( BytesIO( b"""blob @@ -186,7 +186,7 @@ def test_import_stream(self): self.assertIsInstance(self.repo[markers[b"1"]], Blob) self.assertIsInstance(self.repo[markers[b"2"]], Commit) - def test_file_add(self): + def test_file_add(self) -> None: from 
fastimport import commands cmd = commands.BlobCommand(b"23", b"data") @@ -249,7 +249,7 @@ def make_file_commit(self, file_cmds): self.processor.commit_handler(cmd) return self.repo[self.processor.last_commit] - def test_file_copy(self): + def test_file_copy(self) -> None: from fastimport import commands self.simple_commit() @@ -270,7 +270,7 @@ def test_file_copy(self): self.repo[commit.tree].items(), ) - def test_file_move(self): + def test_file_move(self) -> None: from fastimport import commands self.simple_commit() @@ -288,14 +288,14 @@ def test_file_move(self): self.repo[commit.tree].items(), ) - def test_file_delete(self): + def test_file_delete(self) -> None: from fastimport import commands self.simple_commit() commit = self.make_file_commit([commands.FileDeleteCommand(b"path")]) self.assertEqual([], self.repo[commit.tree].items()) - def test_file_deleteall(self): + def test_file_deleteall(self) -> None: from fastimport import commands self.simple_commit() diff --git a/tests/test_file.py b/tests/test_file.py index 8180c8b90..435b807f1 100644 --- a/tests/test_file.py +++ b/tests/test_file.py @@ -30,26 +30,26 @@ class FancyRenameTests(TestCase): - def setUp(self): + def setUp(self) -> None: super().setUp() self._tempdir = tempfile.mkdtemp() self.foo = self.path("foo") self.bar = self.path("bar") self.create(self.foo, b"foo contents") - def tearDown(self): + def tearDown(self) -> None: shutil.rmtree(self._tempdir) super().tearDown() def path(self, filename): return os.path.join(self._tempdir, filename) - def create(self, path, contents): + def create(self, path, contents) -> None: f = open(path, "wb") f.write(contents) f.close() - def test_no_dest_exists(self): + def test_no_dest_exists(self) -> None: self.assertFalse(os.path.exists(self.bar)) _fancy_rename(self.foo, self.bar) self.assertFalse(os.path.exists(self.foo)) @@ -58,7 +58,7 @@ def test_no_dest_exists(self): self.assertEqual(b"foo contents", new_f.read()) new_f.close() - def test_dest_exists(self): + def test_dest_exists(self) -> None: self.create(self.bar, b"bar contents") _fancy_rename(self.foo, self.bar) self.assertFalse(os.path.exists(self.foo)) @@ -67,7 +67,7 @@ def test_dest_exists(self): self.assertEqual(b"foo contents", new_f.read()) new_f.close() - def test_dest_opened(self): + def test_dest_opened(self) -> None: if sys.platform != "win32": raise SkipTest("platform allows overwriting open files") self.create(self.bar, b"bar contents") @@ -86,21 +86,21 @@ def test_dest_opened(self): class GitFileTests(TestCase): - def setUp(self): + def setUp(self) -> None: super().setUp() self._tempdir = tempfile.mkdtemp() f = open(self.path("foo"), "wb") f.write(b"foo contents") f.close() - def tearDown(self): + def tearDown(self) -> None: shutil.rmtree(self._tempdir) super().tearDown() def path(self, filename): return os.path.join(self._tempdir, filename) - def test_invalid(self): + def test_invalid(self) -> None: foo = self.path("foo") self.assertRaises(IOError, GitFile, foo, mode="r") self.assertRaises(IOError, GitFile, foo, mode="ab") @@ -108,7 +108,7 @@ def test_invalid(self): self.assertRaises(IOError, GitFile, foo, mode="w+b") self.assertRaises(IOError, GitFile, foo, mode="a+bU") - def test_readonly(self): + def test_readonly(self) -> None: f = GitFile(self.path("foo"), "rb") self.assertIsInstance(f, io.IOBase) self.assertEqual(b"foo contents", f.read()) @@ -117,12 +117,12 @@ def test_readonly(self): self.assertEqual(b"contents", f.read()) f.close() - def test_default_mode(self): + def test_default_mode(self) -> None: f = 
GitFile(self.path("foo")) self.assertEqual(b"foo contents", f.read()) f.close() - def test_write(self): + def test_write(self) -> None: foo = self.path("foo") foo_lock = f"{foo}.lock" @@ -146,7 +146,7 @@ def test_write(self): self.assertEqual(b"new contents", new_f.read()) new_f.close() - def test_open_twice(self): + def test_open_twice(self) -> None: foo = self.path("foo") f1 = GitFile(foo, "wb") f1.write(b"new") @@ -165,7 +165,7 @@ def test_open_twice(self): self.assertEqual(b"new contents", f.read()) f.close() - def test_abort(self): + def test_abort(self) -> None: foo = self.path("foo") foo_lock = f"{foo}.lock" @@ -183,7 +183,7 @@ def test_abort(self): self.assertEqual(new_orig_f.read(), b"foo contents") new_orig_f.close() - def test_abort_close(self): + def test_abort_close(self) -> None: foo = self.path("foo") f = GitFile(foo, "wb") f.abort() @@ -199,7 +199,7 @@ def test_abort_close(self): except OSError: self.fail() - def test_abort_close_removed(self): + def test_abort_close_removed(self) -> None: foo = self.path("foo") f = GitFile(foo, "wb") diff --git a/tests/test_grafts.py b/tests/test_grafts.py index f345391b0..ce2fcdf7e 100644 --- a/tests/test_grafts.py +++ b/tests/test_grafts.py @@ -35,22 +35,22 @@ def makesha(digit): class GraftParserTests(TestCase): - def assertParse(self, expected, graftpoints): + def assertParse(self, expected, graftpoints) -> None: self.assertEqual(expected, parse_graftpoints(iter(graftpoints))) - def test_no_grafts(self): + def test_no_grafts(self) -> None: self.assertParse({}, []) - def test_no_parents(self): + def test_no_parents(self) -> None: self.assertParse({makesha(0): []}, [makesha(0)]) - def test_parents(self): + def test_parents(self) -> None: self.assertParse( {makesha(0): [makesha(1), makesha(2)]}, [b" ".join([makesha(0), makesha(1), makesha(2)])], ) - def test_multiple_hybrid(self): + def test_multiple_hybrid(self) -> None: self.assertParse( { makesha(0): [], @@ -66,22 +66,22 @@ def test_multiple_hybrid(self): class GraftSerializerTests(TestCase): - def assertSerialize(self, expected, graftpoints): + def assertSerialize(self, expected, graftpoints) -> None: self.assertEqual(sorted(expected), sorted(serialize_graftpoints(graftpoints))) - def test_no_grafts(self): + def test_no_grafts(self) -> None: self.assertSerialize(b"", {}) - def test_no_parents(self): + def test_no_parents(self) -> None: self.assertSerialize(makesha(0), {makesha(0): []}) - def test_parents(self): + def test_parents(self) -> None: self.assertSerialize( b" ".join([makesha(0), makesha(1), makesha(2)]), {makesha(0): [makesha(1), makesha(2)]}, ) - def test_multiple_hybrid(self): + def test_multiple_hybrid(self) -> None: self.assertSerialize( b"\n".join( [ @@ -99,7 +99,7 @@ def test_multiple_hybrid(self): class GraftsInRepositoryBase: - def tearDown(self): + def tearDown(self) -> None: super().tearDown() def get_repo_with_grafts(self, grafts): @@ -107,18 +107,18 @@ def get_repo_with_grafts(self, grafts): r._add_graftpoints(grafts) return r - def test_no_grafts(self): + def test_no_grafts(self) -> None: r = self.get_repo_with_grafts({}) shas = [e.commit.id for e in r.get_walker()] self.assertEqual(shas, self._shas[::-1]) - def test_no_parents_graft(self): + def test_no_parents_graft(self) -> None: r = self.get_repo_with_grafts({self._repo.head(): []}) self.assertEqual([e.commit.id for e in r.get_walker()], [r.head()]) - def test_existing_parent_graft(self): + def test_existing_parent_graft(self) -> None: r = self.get_repo_with_grafts({self._shas[-1]: [self._shas[0]]}) 
self.assertEqual( @@ -126,13 +126,13 @@ def test_existing_parent_graft(self): [self._shas[-1], self._shas[0]], ) - def test_remove_graft(self): + def test_remove_graft(self) -> None: r = self.get_repo_with_grafts({self._repo.head(): []}) r._remove_graftpoints([self._repo.head()]) self.assertEqual([e.commit.id for e in r.get_walker()], self._shas[::-1]) - def test_object_store_fail_invalid_parents(self): + def test_object_store_fail_invalid_parents(self) -> None: r = self._repo self.assertRaises( @@ -141,7 +141,7 @@ def test_object_store_fail_invalid_parents(self): class GraftsInRepoTests(GraftsInRepositoryBase, TestCase): - def setUp(self): + def setUp(self) -> None: super().setUp() self._repo_dir = os.path.join(tempfile.mkdtemp()) r = self._repo = Repo.init(self._repo_dir) @@ -162,14 +162,14 @@ def setUp(self): self._shas.append(r.do_commit(b"empty commit", **commit_kwargs)) self._shas.append(r.do_commit(b"empty commit", **commit_kwargs)) - def test_init_with_empty_info_grafts(self): + def test_init_with_empty_info_grafts(self) -> None: r = self._repo r._put_named_file(os.path.join("info", "grafts"), b"") r = Repo(self._repo_dir) self.assertEqual({}, r._graftpoints) - def test_init_with_info_grafts(self): + def test_init_with_info_grafts(self) -> None: r = self._repo r._put_named_file( os.path.join("info", "grafts"), @@ -181,7 +181,7 @@ def test_init_with_info_grafts(self): class GraftsInMemoryRepoTests(GraftsInRepositoryBase, TestCase): - def setUp(self): + def setUp(self) -> None: super().setUp() r = self._repo = MemoryRepo() diff --git a/tests/test_graph.py b/tests/test_graph.py index 95fc8d61a..906f96ef1 100644 --- a/tests/test_graph.py +++ b/tests/test_graph.py @@ -32,7 +32,7 @@ def run_test(dag, inputs): def lookup_parents(commit_id): return dag[commit_id] - def lookup_stamp(commit_id): + def lookup_stamp(commit_id) -> int: # any constant timestamp value here will work to force # this test to test the same behaviour as done previously return 100 @@ -41,7 +41,7 @@ def lookup_stamp(commit_id): c2s = inputs[1:] return set(_find_lcas(lookup_parents, c1, c2s, lookup_stamp)) - def test_multiple_lca(self): + def test_multiple_lca(self) -> None: # two lowest common ancestors graph = { "5": ["1", "2"], @@ -53,7 +53,7 @@ def test_multiple_lca(self): } self.assertEqual(self.run_test(graph, ["4", "5"]), {"1", "2"}) - def test_no_common_ancestor(self): + def test_no_common_ancestor(self) -> None: # no common ancestor graph = { "4": ["2"], @@ -64,7 +64,7 @@ def test_no_common_ancestor(self): } self.assertEqual(self.run_test(graph, ["4", "3"]), set()) - def test_ancestor(self): + def test_ancestor(self) -> None: # ancestor graph = { "G": ["D", "F"], @@ -77,7 +77,7 @@ def test_ancestor(self): } self.assertEqual(self.run_test(graph, ["D", "C"]), {"C"}) - def test_direct_parent(self): + def test_direct_parent(self) -> None: # parent graph = { "G": ["D", "F"], @@ -90,7 +90,7 @@ def test_direct_parent(self): } self.assertEqual(self.run_test(graph, ["G", "D"]), {"D"}) - def test_another_crossover(self): + def test_another_crossover(self) -> None: # Another cross over graph = { "G": ["D", "F"], @@ -103,7 +103,7 @@ def test_another_crossover(self): } self.assertEqual(self.run_test(graph, ["D", "F"]), {"E", "C"}) - def test_three_way_merge_lca(self): + def test_three_way_merge_lca(self) -> None: # three way merge commit straight from git docs graph = { "C": ["C1"], @@ -126,7 +126,7 @@ def test_three_way_merge_lca(self): # which actually means find the first LCA from either of B OR C with A 
self.assertEqual(self.run_test(graph, ["A", "B", "C"]), {"1"}) - def test_octopus(self): + def test_octopus(self) -> None: # octopus algorithm test # test straight from git docs of A, B, and C # but this time use octopus to find lcas of A, B, and C simultaneously @@ -151,7 +151,7 @@ def test_octopus(self): def lookup_parents(cid): return graph[cid] - def lookup_stamp(commit_id): + def lookup_stamp(commit_id) -> int: # any constant timestamp value here will work to force # this test to test the same behaviour as done previously return 100 @@ -168,7 +168,7 @@ def lookup_stamp(commit_id): class CanFastForwardTests(TestCase): - def test_ff(self): + def test_ff(self) -> None: r = MemoryRepo() base = make_commit() c1 = make_commit(parents=[base.id]) @@ -179,7 +179,7 @@ def test_ff(self): self.assertTrue(can_fast_forward(r, c1.id, c2.id)) self.assertFalse(can_fast_forward(r, c2.id, c1.id)) - def test_diverged(self): + def test_diverged(self) -> None: r = MemoryRepo() base = make_commit() c1 = make_commit(parents=[base.id]) @@ -193,7 +193,7 @@ def test_diverged(self): class WorkListTest(TestCase): - def test_WorkList(self): + def test_WorkList(self) -> None: # tuples of (timestamp, value) are stored in a Priority MaxQueue # repeated use of get should return them in maxheap timestamp # order: largest time value (most recent in time) first then earlier/older diff --git a/tests/test_greenthreads.py b/tests/test_greenthreads.py index cd551e6df..120fda07d 100644 --- a/tests/test_greenthreads.py +++ b/tests/test_greenthreads.py @@ -66,13 +66,13 @@ def init_store(store, count=1): @skipIf(not gevent_support, skipmsg) class TestGreenThreadsMissingObjectFinder(TestCase): - def setUp(self): + def setUp(self) -> None: super().setUp() self.store = MemoryObjectStore() self.cmt_amount = 10 self.objs = init_store(self.store, self.cmt_amount) - def test_finder(self): + def test_finder(self) -> None: wants = [sha.id for sha in self.objs if isinstance(sha, Commit)] finder = GreenThreadsMissingObjectFinder(self.store, (), wants) self.assertEqual(len(finder.sha_done), 0) diff --git a/tests/test_hooks.py b/tests/test_hooks.py index 965c98925..8d0c8843f 100644 --- a/tests/test_hooks.py +++ b/tests/test_hooks.py @@ -32,13 +32,13 @@ class ShellHookTests(TestCase): - def setUp(self): + def setUp(self) -> None: super().setUp() if os.name != "posix": self.skipTest("shell hook tests requires POSIX shell") self.assertTrue(os.path.exists("/bin/sh")) - def test_hook_pre_commit(self): + def test_hook_pre_commit(self) -> None: repo_dir = os.path.join(tempfile.mkdtemp()) os.mkdir(os.path.join(repo_dir, "hooks")) self.addCleanup(shutil.rmtree, repo_dir) @@ -89,7 +89,7 @@ def test_hook_pre_commit(self): hook.execute() - def test_hook_commit_msg(self): + def test_hook_commit_msg(self) -> None: repo_dir = os.path.join(tempfile.mkdtemp()) os.mkdir(os.path.join(repo_dir, "hooks")) self.addCleanup(shutil.rmtree, repo_dir) @@ -133,7 +133,7 @@ def test_hook_commit_msg(self): hook.execute(b"empty commit") - def test_hook_post_commit(self): + def test_hook_post_commit(self) -> None: (fd, path) = tempfile.mkstemp() os.close(fd) diff --git a/tests/test_ignore.py b/tests/test_ignore.py index d051d1d46..d5165a990 100644 --- a/tests/test_ignore.py +++ b/tests/test_ignore.py @@ -89,7 +89,7 @@ class TranslateTests(TestCase): - def test_translate(self): + def test_translate(self) -> None: for pattern, regex in TRANSLATE_TESTS: if re.escape(b"/") == b"/": # Slash is no longer escaped in Python3.7, so undo the escaping @@ -103,7 +103,7 @@ def 
test_translate(self): class ReadIgnorePatterns(TestCase): - def test_read_file(self): + def test_read_file(self) -> None: f = BytesIO( b""" # a comment @@ -128,14 +128,14 @@ def test_read_file(self): class MatchPatternTests(TestCase): - def test_matches(self): + def test_matches(self) -> None: for path, pattern in POSITIVE_MATCH_TESTS: self.assertTrue( match_pattern(path, pattern), f"path: {path!r}, pattern: {pattern!r}", ) - def test_no_matches(self): + def test_no_matches(self) -> None: for path, pattern in NEGATIVE_MATCH_TESTS: self.assertFalse( match_pattern(path, pattern), @@ -144,14 +144,14 @@ def test_no_matches(self): class IgnoreFilterTests(TestCase): - def test_included(self): + def test_included(self) -> None: filter = IgnoreFilter([b"a.c", b"b.c"]) self.assertTrue(filter.is_ignored(b"a.c")) self.assertIs(None, filter.is_ignored(b"c.c")) self.assertEqual([Pattern(b"a.c")], list(filter.find_matching(b"a.c"))) self.assertEqual([], list(filter.find_matching(b"c.c"))) - def test_included_ignorecase(self): + def test_included_ignorecase(self) -> None: filter = IgnoreFilter([b"a.c", b"b.c"], ignorecase=False) self.assertTrue(filter.is_ignored(b"a.c")) self.assertFalse(filter.is_ignored(b"A.c")) @@ -160,14 +160,14 @@ def test_included_ignorecase(self): self.assertTrue(filter.is_ignored(b"A.c")) self.assertTrue(filter.is_ignored(b"A.C")) - def test_excluded(self): + def test_excluded(self) -> None: filter = IgnoreFilter([b"a.c", b"b.c", b"!c.c"]) self.assertFalse(filter.is_ignored(b"c.c")) self.assertIs(None, filter.is_ignored(b"d.c")) self.assertEqual([Pattern(b"!c.c")], list(filter.find_matching(b"c.c"))) self.assertEqual([], list(filter.find_matching(b"d.c"))) - def test_include_exclude_include(self): + def test_include_exclude_include(self) -> None: filter = IgnoreFilter([b"a.c", b"!a.c", b"a.c"]) self.assertTrue(filter.is_ignored(b"a.c")) self.assertEqual( @@ -175,7 +175,7 @@ def test_include_exclude_include(self): list(filter.find_matching(b"a.c")), ) - def test_manpage(self): + def test_manpage(self) -> None: # A specific example from the gitignore manpage filter = IgnoreFilter([b"/*", b"!/foo", b"/foo/*", b"!/foo/bar"]) self.assertTrue(filter.is_ignored(b"a.c")) @@ -185,7 +185,7 @@ def test_manpage(self): self.assertFalse(filter.is_ignored(b"foo/bar/")) self.assertFalse(filter.is_ignored(b"foo/bar/bloe")) - def test_regex_special(self): + def test_regex_special(self) -> None: # See https://github.com/dulwich/dulwich/issues/930#issuecomment-1026166429 filter = IgnoreFilter([b"/foo\\[bar\\]", b"/foo"]) self.assertTrue(filter.is_ignored("foo")) @@ -193,7 +193,7 @@ def test_regex_special(self): class IgnoreFilterStackTests(TestCase): - def test_stack_first(self): + def test_stack_first(self) -> None: filter1 = IgnoreFilter([b"[a].c", b"[b].c", b"![d].c"]) filter2 = IgnoreFilter([b"[a].c", b"![b],c", b"[c].c", b"[d].c"]) stack = IgnoreFilterStack([filter1, filter2]) @@ -205,7 +205,7 @@ def test_stack_first(self): class IgnoreFilterManagerTests(TestCase): - def test_load_ignore(self): + def test_load_ignore(self) -> None: tmp_dir = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, tmp_dir) repo = Repo.init(tmp_dir) @@ -232,7 +232,7 @@ def test_load_ignore(self): self.assertTrue(m.is_ignored("dir3/")) self.assertTrue(m.is_ignored("dir3/bla")) - def test_nested_gitignores(self): + def test_nested_gitignores(self) -> None: tmp_dir = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, tmp_dir) repo = Repo.init(tmp_dir) @@ -251,7 +251,7 @@ def test_nested_gitignores(self): m = 
IgnoreFilterManager.from_repo(repo) self.assertTrue(m.is_ignored("foo/bar")) - def test_load_ignore_ignorecase(self): + def test_load_ignore_ignorecase(self) -> None: tmp_dir = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, tmp_dir) repo = Repo.init(tmp_dir) @@ -265,7 +265,7 @@ def test_load_ignore_ignorecase(self): self.assertTrue(m.is_ignored(os.path.join("dir", "blie"))) self.assertTrue(m.is_ignored(os.path.join("DIR", "blie"))) - def test_ignored_contents(self): + def test_ignored_contents(self) -> None: tmp_dir = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, tmp_dir) repo = Repo.init(tmp_dir) diff --git a/tests/test_index.py b/tests/test_index.py index ab523b306..cffa1a58d 100644 --- a/tests/test_index.py +++ b/tests/test_index.py @@ -54,7 +54,7 @@ from . import TestCase, skipIf -def can_symlink(): +def can_symlink() -> bool: """Return whether running process can create symlinks.""" if sys.platform != "win32": # Platforms other than Windows should allow symlinks without issues. @@ -77,19 +77,19 @@ def get_simple_index(self, name): class SimpleIndexTestCase(IndexTestCase): - def test_len(self): + def test_len(self) -> None: self.assertEqual(1, len(self.get_simple_index("index"))) - def test_iter(self): + def test_iter(self) -> None: self.assertEqual([b"bla"], list(self.get_simple_index("index"))) - def test_iterobjects(self): + def test_iterobjects(self) -> None: self.assertEqual( [(b"bla", b"e69de29bb2d1d6434b8b29ae775ad8c2e48c5391", 33188)], list(self.get_simple_index("index").iterobjects()), ) - def test_getitem(self): + def test_getitem(self) -> None: self.assertEqual( IndexEntry( (1230680220, 0), @@ -105,12 +105,12 @@ def test_getitem(self): self.get_simple_index("index")[b"bla"], ) - def test_empty(self): + def test_empty(self) -> None: i = self.get_simple_index("notanindex") self.assertEqual(0, len(i)) self.assertFalse(os.path.exists(i._filename)) - def test_against_empty_tree(self): + def test_against_empty_tree(self) -> None: i = self.get_simple_index("index") changes = list(i.changes_from_tree(MemoryObjectStore(), None)) self.assertEqual(1, len(changes)) @@ -120,15 +120,15 @@ def test_against_empty_tree(self): class SimpleIndexWriterTestCase(IndexTestCase): - def setUp(self): + def setUp(self) -> None: IndexTestCase.setUp(self) self.tempdir = tempfile.mkdtemp() - def tearDown(self): + def tearDown(self) -> None: IndexTestCase.tearDown(self) shutil.rmtree(self.tempdir) - def test_simple_write(self): + def test_simple_write(self) -> None: entries = [ ( SerializedIndexEntry( @@ -156,15 +156,15 @@ def test_simple_write(self): class ReadIndexDictTests(IndexTestCase): - def setUp(self): + def setUp(self) -> None: IndexTestCase.setUp(self) self.tempdir = tempfile.mkdtemp() - def tearDown(self): + def tearDown(self) -> None: IndexTestCase.tearDown(self) shutil.rmtree(self.tempdir) - def test_simple_write(self): + def test_simple_write(self) -> None: entries = { b"barbla": IndexEntry( (1230680220, 0), @@ -187,11 +187,11 @@ def test_simple_write(self): class CommitTreeTests(TestCase): - def setUp(self): + def setUp(self) -> None: super().setUp() self.store = MemoryObjectStore() - def test_single_blob(self): + def test_single_blob(self) -> None: blob = Blob() blob.data = b"foo" self.store.add_object(blob) @@ -201,7 +201,7 @@ def test_single_blob(self): self.assertEqual((stat.S_IFREG, blob.id), self.store[rootid][b"bla"]) self.assertEqual({rootid, blob.id}, set(self.store._data.keys())) - def test_nested(self): + def test_nested(self) -> None: blob = Blob() blob.data = b"foo" 
self.store.add_object(blob) @@ -216,49 +216,49 @@ def test_nested(self): class CleanupModeTests(TestCase): - def assertModeEqual(self, expected, got): + def assertModeEqual(self, expected, got) -> None: self.assertEqual(expected, got, f"{expected:o} != {got:o}") - def test_file(self): + def test_file(self) -> None: self.assertModeEqual(0o100644, cleanup_mode(0o100000)) - def test_executable(self): + def test_executable(self) -> None: self.assertModeEqual(0o100755, cleanup_mode(0o100711)) self.assertModeEqual(0o100755, cleanup_mode(0o100700)) - def test_symlink(self): + def test_symlink(self) -> None: self.assertModeEqual(0o120000, cleanup_mode(0o120711)) - def test_dir(self): + def test_dir(self) -> None: self.assertModeEqual(0o040000, cleanup_mode(0o40531)) - def test_submodule(self): + def test_submodule(self) -> None: self.assertModeEqual(0o160000, cleanup_mode(0o160744)) class WriteCacheTimeTests(TestCase): - def test_write_string(self): + def test_write_string(self) -> None: f = BytesIO() self.assertRaises(TypeError, write_cache_time, f, "foo") - def test_write_int(self): + def test_write_int(self) -> None: f = BytesIO() write_cache_time(f, 434343) self.assertEqual(struct.pack(">LL", 434343, 0), f.getvalue()) - def test_write_tuple(self): + def test_write_tuple(self) -> None: f = BytesIO() write_cache_time(f, (434343, 21)) self.assertEqual(struct.pack(">LL", 434343, 21), f.getvalue()) - def test_write_float(self): + def test_write_float(self) -> None: f = BytesIO() write_cache_time(f, 434343.000000021) self.assertEqual(struct.pack(">LL", 434343, 21), f.getvalue()) class IndexEntryFromStatTests(TestCase): - def test_simple(self): + def test_simple(self) -> None: st = os.stat_result( ( 16877, @@ -289,7 +289,7 @@ def test_simple(self): ), ) - def test_override_mode(self): + def test_override_mode(self) -> None: st = os.stat_result( ( stat.S_IFREG + 0o644, @@ -322,19 +322,19 @@ def test_override_mode(self): class BuildIndexTests(TestCase): - def assertReasonableIndexEntry(self, index_entry, mode, filesize, sha): + def assertReasonableIndexEntry(self, index_entry, mode, filesize, sha) -> None: self.assertEqual(index_entry.mode, mode) # mode self.assertEqual(index_entry.size, filesize) # filesize self.assertEqual(index_entry.sha, sha) # sha - def assertFileContents(self, path, contents, symlink=False): + def assertFileContents(self, path, contents, symlink=False) -> None: if symlink: self.assertEqual(os.readlink(path), contents) else: with open(path, "rb") as f: self.assertEqual(f.read(), contents) - def test_empty(self): + def test_empty(self) -> None: repo_dir = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, repo_dir) with Repo.init(repo_dir) as repo: @@ -352,7 +352,7 @@ def test_empty(self): # Verify no files self.assertEqual([".git"], os.listdir(repo.path)) - def test_git_dir(self): + def test_git_dir(self) -> None: repo_dir = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, repo_dir) with Repo.init(repo_dir) as repo: @@ -386,7 +386,7 @@ def test_git_dir(self): ) self.assertFileContents(epath, b"d") - def test_nonempty(self): + def test_nonempty(self) -> None: repo_dir = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, repo_dir) with Repo.init(repo_dir) as repo: @@ -441,7 +441,7 @@ def test_nonempty(self): self.assertEqual(["d"], sorted(os.listdir(os.path.join(repo.path, "c")))) @skipIf(not getattr(os, "sync", None), "Requires sync support") - def test_norewrite(self): + def test_norewrite(self) -> None: repo_dir = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, repo_dir) with 
Repo.init(repo_dir) as repo: @@ -483,7 +483,7 @@ def test_norewrite(self): self.assertEqual(b"file a", fh.read()) @skipIf(not can_symlink(), "Requires symlink support") - def test_symlink(self): + def test_symlink(self) -> None: repo_dir = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, repo_dir) with Repo.init(repo_dir) as repo: @@ -515,7 +515,7 @@ def test_symlink(self): ) self.assertFileContents(epath, "d", symlink=True) - def test_no_decode_encode(self): + def test_no_decode_encode(self) -> None: repo_dir = tempfile.mkdtemp() repo_dir_bytes = os.fsencode(repo_dir) self.addCleanup(shutil.rmtree, repo_dir) @@ -560,7 +560,7 @@ def test_no_decode_encode(self): self.assertTrue(os.path.exists(utf8_path)) - def test_git_submodule(self): + def test_git_submodule(self) -> None: repo_dir = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, repo_dir) with Repo.init(repo_dir) as repo: @@ -600,7 +600,7 @@ def test_git_submodule(self): self.assertEqual(index[b"c"].mode, S_IFGITLINK) # mode self.assertEqual(index[b"c"].sha, c.id) # sha - def test_git_submodule_exists(self): + def test_git_submodule_exists(self) -> None: repo_dir = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, repo_dir) with Repo.init(repo_dir) as repo: @@ -643,7 +643,7 @@ def test_git_submodule_exists(self): class GetUnstagedChangesTests(TestCase): - def test_get_unstaged_changes(self): + def test_get_unstaged_changes(self) -> None: """Unit test for get_unstaged_changes.""" repo_dir = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, repo_dir) @@ -674,7 +674,7 @@ def test_get_unstaged_changes(self): self.assertEqual(list(changes), [b"foo1"]) - def test_get_unstaged_deleted_changes(self): + def test_get_unstaged_deleted_changes(self) -> None: """Unit test for get_unstaged_changes.""" repo_dir = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, repo_dir) @@ -697,7 +697,7 @@ def test_get_unstaged_deleted_changes(self): self.assertEqual(list(changes), [b"foo1"]) - def test_get_unstaged_changes_removed_replaced_by_directory(self): + def test_get_unstaged_changes_removed_replaced_by_directory(self) -> None: """Unit test for get_unstaged_changes.""" repo_dir = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, repo_dir) @@ -722,7 +722,7 @@ def test_get_unstaged_changes_removed_replaced_by_directory(self): self.assertEqual(list(changes), [b"foo1"]) @skipIf(not can_symlink(), "Requires symlink support") - def test_get_unstaged_changes_removed_replaced_by_link(self): + def test_get_unstaged_changes_removed_replaced_by_link(self) -> None: """Unit test for get_unstaged_changes.""" repo_dir = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, repo_dir) @@ -748,7 +748,7 @@ def test_get_unstaged_changes_removed_replaced_by_link(self): class TestValidatePathElement(TestCase): - def test_default(self): + def test_default(self) -> None: self.assertTrue(validate_path_element_default(b"bla")) self.assertTrue(validate_path_element_default(b".bla")) self.assertFalse(validate_path_element_default(b".git")) @@ -756,7 +756,7 @@ def test_default(self): self.assertFalse(validate_path_element_default(b"..")) self.assertTrue(validate_path_element_default(b"git~1")) - def test_ntfs(self): + def test_ntfs(self) -> None: self.assertTrue(validate_path_element_ntfs(b"bla")) self.assertTrue(validate_path_element_ntfs(b".bla")) self.assertFalse(validate_path_element_ntfs(b".git")) @@ -766,7 +766,7 @@ def test_ntfs(self): class TestTreeFSPathConversion(TestCase): - def test_tree_to_fs_path(self): + def test_tree_to_fs_path(self) -> None: tree_path = 
"délwíçh/foo".encode() fs_path = _tree_to_fs_path(b"/prefix/path", tree_path) self.assertEqual( @@ -774,12 +774,12 @@ def test_tree_to_fs_path(self): os.fsencode(os.path.join("/prefix/path", "délwíçh", "foo")), ) - def test_fs_to_tree_path_str(self): + def test_fs_to_tree_path_str(self) -> None: fs_path = os.path.join(os.path.join("délwíçh", "foo")) tree_path = _fs_to_tree_path(fs_path) self.assertEqual(tree_path, "délwíçh/foo".encode()) - def test_fs_to_tree_path_bytes(self): + def test_fs_to_tree_path_bytes(self) -> None: fs_path = os.path.join(os.fsencode(os.path.join("délwíçh", "foo"))) tree_path = _fs_to_tree_path(fs_path) self.assertEqual(tree_path, "délwíçh/foo".encode()) diff --git a/tests/test_lfs.py b/tests/test_lfs.py index 484b666d8..1a045488d 100644 --- a/tests/test_lfs.py +++ b/tests/test_lfs.py @@ -29,16 +29,16 @@ class LFSTests(TestCase): - def setUp(self): + def setUp(self) -> None: super().setUp() self.test_dir = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, self.test_dir) self.lfs = LFSStore.create(self.test_dir) - def test_create(self): + def test_create(self) -> None: sha = self.lfs.write_object([b"a", b"b"]) with self.lfs.open_object(sha) as f: self.assertEqual(b"ab", f.read()) - def test_missing(self): + def test_missing(self) -> None: self.assertRaises(KeyError, self.lfs.open_object, "abcdeabcdeabcdeabcde") diff --git a/tests/test_line_ending.py b/tests/test_line_ending.py index 79254ebda..d3ad44382 100644 --- a/tests/test_line_ending.py +++ b/tests/test_line_ending.py @@ -35,59 +35,59 @@ class LineEndingConversion(TestCase): """Test the line ending conversion functions in various cases.""" - def test_convert_crlf_to_lf_no_op(self): + def test_convert_crlf_to_lf_no_op(self) -> None: self.assertEqual(convert_crlf_to_lf(b"foobar"), b"foobar") - def test_convert_crlf_to_lf(self): + def test_convert_crlf_to_lf(self) -> None: self.assertEqual(convert_crlf_to_lf(b"line1\r\nline2"), b"line1\nline2") - def test_convert_crlf_to_lf_mixed(self): + def test_convert_crlf_to_lf_mixed(self) -> None: self.assertEqual(convert_crlf_to_lf(b"line1\r\n\nline2"), b"line1\n\nline2") - def test_convert_lf_to_crlf_no_op(self): + def test_convert_lf_to_crlf_no_op(self) -> None: self.assertEqual(convert_lf_to_crlf(b"foobar"), b"foobar") - def test_convert_lf_to_crlf(self): + def test_convert_lf_to_crlf(self) -> None: self.assertEqual(convert_lf_to_crlf(b"line1\nline2"), b"line1\r\nline2") - def test_convert_lf_to_crlf_mixed(self): + def test_convert_lf_to_crlf_mixed(self) -> None: self.assertEqual(convert_lf_to_crlf(b"line1\r\n\nline2"), b"line1\r\n\r\nline2") class GetLineEndingAutocrlfFilters(TestCase): - def test_get_checkin_filter_autocrlf_default(self): + def test_get_checkin_filter_autocrlf_default(self) -> None: checkin_filter = get_checkin_filter_autocrlf(b"false") self.assertEqual(checkin_filter, None) - def test_get_checkin_filter_autocrlf_true(self): + def test_get_checkin_filter_autocrlf_true(self) -> None: checkin_filter = get_checkin_filter_autocrlf(b"true") self.assertEqual(checkin_filter, convert_crlf_to_lf) - def test_get_checkin_filter_autocrlf_input(self): + def test_get_checkin_filter_autocrlf_input(self) -> None: checkin_filter = get_checkin_filter_autocrlf(b"input") self.assertEqual(checkin_filter, convert_crlf_to_lf) - def test_get_checkout_filter_autocrlf_default(self): + def test_get_checkout_filter_autocrlf_default(self) -> None: checkout_filter = get_checkout_filter_autocrlf(b"false") self.assertEqual(checkout_filter, None) - def 
test_get_checkout_filter_autocrlf_true(self): + def test_get_checkout_filter_autocrlf_true(self) -> None: checkout_filter = get_checkout_filter_autocrlf(b"true") self.assertEqual(checkout_filter, convert_lf_to_crlf) - def test_get_checkout_filter_autocrlf_input(self): + def test_get_checkout_filter_autocrlf_input(self) -> None: checkout_filter = get_checkout_filter_autocrlf(b"input") self.assertEqual(checkout_filter, None) class NormalizeBlobTestCase(TestCase): - def test_normalize_to_lf_no_op(self): + def test_normalize_to_lf_no_op(self) -> None: base_content = b"line1\nline2" base_sha = "f8be7bb828880727816015d21abcbc37d033f233" @@ -104,7 +104,7 @@ def test_normalize_to_lf_no_op(self): self.assertEqual(filtered_blob.as_raw_chunks(), [base_content]) self.assertEqual(filtered_blob.sha().hexdigest(), base_sha) - def test_normalize_to_lf(self): + def test_normalize_to_lf(self) -> None: base_content = b"line1\r\nline2" base_sha = "3a1bd7a52799fe5cf6411f1d35f4c10bacb1db96" @@ -124,7 +124,7 @@ def test_normalize_to_lf(self): self.assertEqual(filtered_blob.as_raw_chunks(), [normalized_content]) self.assertEqual(filtered_blob.sha().hexdigest(), normalized_sha) - def test_normalize_to_lf_binary(self): + def test_normalize_to_lf_binary(self) -> None: base_content = b"line1\r\nline2\0" base_sha = "b44504193b765f7cd79673812de8afb55b372ab2" @@ -141,7 +141,7 @@ def test_normalize_to_lf_binary(self): self.assertEqual(filtered_blob.as_raw_chunks(), [base_content]) self.assertEqual(filtered_blob.sha().hexdigest(), base_sha) - def test_normalize_to_crlf_no_op(self): + def test_normalize_to_crlf_no_op(self) -> None: base_content = b"line1\r\nline2" base_sha = "3a1bd7a52799fe5cf6411f1d35f4c10bacb1db96" @@ -158,7 +158,7 @@ def test_normalize_to_crlf_no_op(self): self.assertEqual(filtered_blob.as_raw_chunks(), [base_content]) self.assertEqual(filtered_blob.sha().hexdigest(), base_sha) - def test_normalize_to_crlf(self): + def test_normalize_to_crlf(self) -> None: base_content = b"line1\nline2" base_sha = "f8be7bb828880727816015d21abcbc37d033f233" @@ -178,7 +178,7 @@ def test_normalize_to_crlf(self): self.assertEqual(filtered_blob.as_raw_chunks(), [normalized_content]) self.assertEqual(filtered_blob.sha().hexdigest(), normalized_sha) - def test_normalize_to_crlf_binary(self): + def test_normalize_to_crlf_binary(self) -> None: base_content = b"line1\r\nline2\0" base_sha = "b44504193b765f7cd79673812de8afb55b372ab2" diff --git a/tests/test_lru_cache.py b/tests/test_lru_cache.py index 343892c76..dc2f8df52 100644 --- a/tests/test_lru_cache.py +++ b/tests/test_lru_cache.py @@ -27,7 +27,7 @@ class TestLRUCache(TestCase): """Test that LRU cache properly keeps track of entries.""" - def test_cache_size(self): + def test_cache_size(self) -> None: cache = lru_cache.LRUCache(max_cache=10) self.assertEqual(10, cache.cache_size()) @@ -37,7 +37,7 @@ def test_cache_size(self): cache.resize(512) self.assertEqual(512, cache.cache_size()) - def test_missing(self): + def test_missing(self) -> None: cache = lru_cache.LRUCache(max_cache=10) self.assertNotIn("foo", cache) @@ -48,7 +48,7 @@ def test_missing(self): self.assertIn("foo", cache) self.assertNotIn("bar", cache) - def test_map_None(self): + def test_map_None(self) -> None: # Make sure that we can properly map None as a key. 
cache = lru_cache.LRUCache(max_cache=10) self.assertNotIn(None, cache) @@ -65,11 +65,11 @@ def test_map_None(self): cache[None] self.assertEqual([None, 1], [n.key for n in cache._walk_lru()]) - def test_add__null_key(self): + def test_add__null_key(self) -> None: cache = lru_cache.LRUCache(max_cache=10) self.assertRaises(ValueError, cache.add, lru_cache._null_key, 1) - def test_overflow(self): + def test_overflow(self) -> None: """Adding extra entries will pop out old ones.""" cache = lru_cache.LRUCache(max_cache=1, after_cleanup_count=1) @@ -82,7 +82,7 @@ def test_overflow(self): self.assertEqual("biz", cache["baz"]) - def test_by_usage(self): + def test_by_usage(self) -> None: """Accessing entries bumps them up in priority.""" cache = lru_cache.LRUCache(max_cache=2) @@ -96,11 +96,11 @@ def test_by_usage(self): self.assertNotIn("foo", cache) - def test_cleanup(self): + def test_cleanup(self) -> None: """Test that we can use a cleanup function.""" cleanup_called = [] - def cleanup_func(key, val): + def cleanup_func(key, val) -> None: cleanup_called.append((key, val)) cache = lru_cache.LRUCache(max_cache=2, after_cleanup_count=2) @@ -116,11 +116,11 @@ def cleanup_func(key, val): cache.clear() self.assertEqual([("baz", "1"), ("biz", "3"), ("foo", "2")], cleanup_called) - def test_cleanup_on_replace(self): + def test_cleanup_on_replace(self) -> None: """Replacing an object should cleanup the old value.""" cleanup_called = [] - def cleanup_func(key, val): + def cleanup_func(key, val) -> None: cleanup_called.append((key, val)) cache = lru_cache.LRUCache(max_cache=2) @@ -135,7 +135,7 @@ def cleanup_func(key, val): cache[2] = 26 self.assertEqual([(2, 20), (2, 25)], cleanup_called) - def test_len(self): + def test_len(self) -> None: cache = lru_cache.LRUCache(max_cache=10, after_cleanup_count=10) cache[1] = 10 @@ -167,7 +167,7 @@ def test_len(self): [n.key for n in cache._walk_lru()], ) - def test_cleanup_shrinks_to_after_clean_count(self): + def test_cleanup_shrinks_to_after_clean_count(self) -> None: cache = lru_cache.LRUCache(max_cache=5, after_cleanup_count=3) cache.add(1, 10) @@ -182,16 +182,16 @@ def test_cleanup_shrinks_to_after_clean_count(self): cache.add(6, 40) self.assertEqual(3, len(cache)) - def test_after_cleanup_larger_than_max(self): + def test_after_cleanup_larger_than_max(self) -> None: cache = lru_cache.LRUCache(max_cache=5, after_cleanup_count=10) self.assertEqual(5, cache._after_cleanup_count) - def test_after_cleanup_none(self): + def test_after_cleanup_none(self) -> None: cache = lru_cache.LRUCache(max_cache=5, after_cleanup_count=None) # By default _after_cleanup_size is 80% of the normal size self.assertEqual(4, cache._after_cleanup_count) - def test_cleanup_2(self): + def test_cleanup_2(self) -> None: cache = lru_cache.LRUCache(max_cache=5, after_cleanup_count=2) # Add these in order @@ -206,7 +206,7 @@ def test_cleanup_2(self): cache.cleanup() self.assertEqual(2, len(cache)) - def test_preserve_last_access_order(self): + def test_preserve_last_access_order(self) -> None: cache = lru_cache.LRUCache(max_cache=5) # Add these in order @@ -225,7 +225,7 @@ def test_preserve_last_access_order(self): cache[2] self.assertEqual([2, 3, 5, 4, 1], [n.key for n in cache._walk_lru()]) - def test_get(self): + def test_get(self) -> None: cache = lru_cache.LRUCache(max_cache=5) cache.add(1, 10) @@ -238,7 +238,7 @@ def test_get(self): self.assertEqual(10, cache.get(1)) self.assertEqual([1, 2], [n.key for n in cache._walk_lru()]) - def test_keys(self): + def test_keys(self) -> None: cache = 
lru_cache.LRUCache(max_cache=5, after_cleanup_count=5) cache[1] = 2 @@ -250,7 +250,7 @@ def test_keys(self): cache[6] = 7 self.assertEqual([2, 3, 4, 5, 6], sorted(cache.keys())) - def test_resize_smaller(self): + def test_resize_smaller(self) -> None: cache = lru_cache.LRUCache(max_cache=5, after_cleanup_count=4) cache[1] = 2 cache[2] = 3 @@ -269,7 +269,7 @@ def test_resize_smaller(self): cache[8] = 9 self.assertEqual([7, 8], sorted(cache.keys())) - def test_resize_larger(self): + def test_resize_larger(self) -> None: cache = lru_cache.LRUCache(max_cache=5, after_cleanup_count=4) cache[1] = 2 cache[2] = 3 @@ -291,23 +291,23 @@ def test_resize_larger(self): class TestLRUSizeCache(TestCase): - def test_basic_init(self): + def test_basic_init(self) -> None: cache = lru_cache.LRUSizeCache() self.assertEqual(2048, cache._max_cache) self.assertEqual(int(cache._max_size * 0.8), cache._after_cleanup_size) self.assertEqual(0, cache._value_size) - def test_add__null_key(self): + def test_add__null_key(self) -> None: cache = lru_cache.LRUSizeCache() self.assertRaises(ValueError, cache.add, lru_cache._null_key, 1) - def test_add_tracks_size(self): + def test_add_tracks_size(self) -> None: cache = lru_cache.LRUSizeCache() self.assertEqual(0, cache._value_size) cache.add("my key", "my value text") self.assertEqual(13, cache._value_size) - def test_remove_tracks_size(self): + def test_remove_tracks_size(self) -> None: cache = lru_cache.LRUSizeCache() self.assertEqual(0, cache._value_size) cache.add("my key", "my value text") @@ -316,7 +316,7 @@ def test_remove_tracks_size(self): cache._remove_node(node) self.assertEqual(0, cache._value_size) - def test_no_add_over_size(self): + def test_no_add_over_size(self) -> None: """Adding a large value may not be cached at all.""" cache = lru_cache.LRUSizeCache(max_size=10, after_cleanup_size=5) self.assertEqual(0, cache._value_size) @@ -337,11 +337,11 @@ def test_no_add_over_size(self): self.assertEqual(3, cache._value_size) self.assertEqual({"test": "key"}, cache.items()) - def test_no_add_over_size_cleanup(self): + def test_no_add_over_size_cleanup(self) -> None: """If a large value is not cached, we will call cleanup right away.""" cleanup_calls = [] - def cleanup(key, value): + def cleanup(key, value) -> None: cleanup_calls.append((key, value)) cache = lru_cache.LRUSizeCache(max_size=10, after_cleanup_size=5) @@ -354,7 +354,7 @@ def cleanup(key, value): # and cleanup was called self.assertEqual([("test", "key that is too big")], cleanup_calls) - def test_adding_clears_cache_based_on_size(self): + def test_adding_clears_cache_based_on_size(self) -> None: """The cache is cleared in LRU order until small enough.""" cache = lru_cache.LRUSizeCache(max_size=20) cache.add("key1", "value") # 5 chars @@ -367,7 +367,7 @@ def test_adding_clears_cache_based_on_size(self): self.assertEqual(6 + 8, cache._value_size) self.assertEqual({"key2": "value2", "key4": "value234"}, cache.items()) - def test_adding_clears_to_after_cleanup_size(self): + def test_adding_clears_to_after_cleanup_size(self) -> None: cache = lru_cache.LRUSizeCache(max_size=20, after_cleanup_size=10) cache.add("key1", "value") # 5 chars cache.add("key2", "value2") # 6 chars @@ -379,7 +379,7 @@ def test_adding_clears_to_after_cleanup_size(self): self.assertEqual(8, cache._value_size) self.assertEqual({"key4": "value234"}, cache.items()) - def test_custom_sizes(self): + def test_custom_sizes(self) -> None: def size_of_list(lst): return sum(len(x) for x in lst) @@ -397,7 +397,7 @@ def size_of_list(lst): 
self.assertEqual(8, cache._value_size) self.assertEqual({"key4": ["value", "234"]}, cache.items()) - def test_cleanup(self): + def test_cleanup(self) -> None: cache = lru_cache.LRUSizeCache(max_size=20, after_cleanup_size=10) # Add these in order @@ -410,7 +410,7 @@ def test_cleanup(self): # Only the most recent fits after cleaning up self.assertEqual(7, cache._value_size) - def test_keys(self): + def test_keys(self) -> None: cache = lru_cache.LRUSizeCache(max_size=10) cache[1] = "a" @@ -418,7 +418,7 @@ def test_keys(self): cache[3] = "cdef" self.assertEqual([1, 2, 3], sorted(cache.keys())) - def test_resize_smaller(self): + def test_resize_smaller(self) -> None: cache = lru_cache.LRUSizeCache(max_size=10, after_cleanup_size=9) cache[1] = "abc" cache[2] = "def" @@ -435,7 +435,7 @@ def test_resize_smaller(self): cache[6] = "pqr" self.assertEqual([6], sorted(cache.keys())) - def test_resize_larger(self): + def test_resize_larger(self) -> None: cache = lru_cache.LRUSizeCache(max_size=10, after_cleanup_size=9) cache[1] = "abc" cache[2] = "def" diff --git a/tests/test_mailmap.py b/tests/test_mailmap.py index 08a882a50..c032d1d9d 100644 --- a/tests/test_mailmap.py +++ b/tests/test_mailmap.py @@ -27,7 +27,7 @@ class ReadMailmapTests(TestCase): - def test_read(self): + def test_read(self) -> None: b = BytesIO( b"""\ Jane Doe @@ -67,7 +67,7 @@ def test_read(self): class MailmapTests(TestCase): - def test_lookup(self): + def test_lookup(self) -> None: m = Mailmap() m.add_entry((b"Jane Doe", b"jane@desktop.(none)"), (None, None)) m.add_entry((b"Joe R. Developer", b"joe@example.com"), None) diff --git a/tests/test_missing_obj_finder.py b/tests/test_missing_obj_finder.py index b222099a2..6faee7901 100644 --- a/tests/test_missing_obj_finder.py +++ b/tests/test_missing_obj_finder.py @@ -26,7 +26,7 @@ class MissingObjectFinderTest(TestCase): - def setUp(self): + def setUp(self) -> None: super().setUp() self.store = MemoryObjectStore() self.commits = [] @@ -34,7 +34,7 @@ def setUp(self): def cmt(self, n): return self.commits[n - 1] - def assertMissingMatch(self, haves, wants, expected): + def assertMissingMatch(self, haves, wants, expected) -> None: for sha, path in MissingObjectFinder(self.store, haves, wants, shallow=set()): self.assertIn( sha, expected, f"({sha},{path}) erroneously reported as missing" @@ -49,7 +49,7 @@ def assertMissingMatch(self, haves, wants, expected): class MOFLinearRepoTest(MissingObjectFinderTest): - def setUp(self): + def setUp(self) -> None: super().setUp() # present in 1, removed in 3 f1_1 = make_object(Blob, data=b"f1") @@ -84,23 +84,23 @@ def setUp(self): f2_3.id, ] - def test_1_to_2(self): + def test_1_to_2(self) -> None: self.assertMissingMatch([self.cmt(1).id], [self.cmt(2).id], self.missing_1_2) - def test_2_to_3(self): + def test_2_to_3(self) -> None: self.assertMissingMatch([self.cmt(2).id], [self.cmt(3).id], self.missing_2_3) - def test_1_to_3(self): + def test_1_to_3(self) -> None: self.assertMissingMatch([self.cmt(1).id], [self.cmt(3).id], self.missing_1_3) - def test_bogus_haves(self): + def test_bogus_haves(self) -> None: """Ensure non-existent SHA in haves are tolerated.""" bogus_sha = self.cmt(2).id[::-1] haves = [self.cmt(1).id, bogus_sha] wants = [self.cmt(3).id] self.assertMissingMatch(haves, wants, self.missing_1_3) - def test_bogus_wants_failure(self): + def test_bogus_wants_failure(self) -> None: """Ensure non-existent SHA in wants are not tolerated.""" bogus_sha = self.cmt(2).id[::-1] haves = [self.cmt(1).id] @@ -109,7 +109,7 @@ def 
test_bogus_wants_failure(self): KeyError, MissingObjectFinder, self.store, haves, wants, shallow=set() ) - def test_no_changes(self): + def test_no_changes(self) -> None: self.assertMissingMatch([self.cmt(3).id], [self.cmt(3).id], []) @@ -120,7 +120,7 @@ class MOFMergeForkRepoTest(MissingObjectFinderTest): # \ # 5 - def setUp(self): + def setUp(self) -> None: super().setUp() f1_1 = make_object(Blob, data=b"f1") f1_2 = make_object(Blob, data=b"f1-2") @@ -153,7 +153,7 @@ def setUp(self): self.assertEqual(f1_2.id, f1_7.id, "[sanity]") - def test_have6_want7(self): + def test_have6_want7(self) -> None: # have 6, want 7. Ideally, shall not report f1_7 as it's the same as # f1_2, however, to do so, MissingObjectFinder shall not record trees # of common commits only, but also all parent trees and tree items, @@ -166,7 +166,7 @@ def test_have6_want7(self): [self.cmt(7).id, self.cmt(7).tree, self.f1_7_id], ) - def test_have4_want7(self): + def test_have4_want7(self) -> None: # have 4, want 7. Shall not include rev5 as it is not in the tree # between 4 and 7 (well, it is, but its SHA's are irrelevant for 4..7 # commit hierarchy) @@ -185,7 +185,7 @@ def test_have4_want7(self): ], ) - def test_have1_want6(self): + def test_have1_want6(self) -> None: # have 1, want 6. Shall not include rev5 self.assertMissingMatch( [self.cmt(1).id], @@ -206,7 +206,7 @@ def test_have1_want6(self): ], ) - def test_have3_want6(self): + def test_have3_want6(self) -> None: # have 3, want 7. Shall not report rev2 and its tree, because # haves(3) means has parents, i.e. rev2, too # BUT shall report any changes descending rev2 (excluding rev3) @@ -225,7 +225,7 @@ def test_have3_want6(self): ], ) - def test_have5_want7(self): + def test_have5_want7(self) -> None: # have 5, want 7. Common parent is rev2, hence children of rev2 from # a descent line other than rev5 shall be reported # expects f1_4 from rev6. f3_5 is known in rev5; @@ -246,7 +246,7 @@ def test_have5_want7(self): class MOFTagsTest(MissingObjectFinderTest): - def setUp(self): + def setUp(self) -> None: super().setUp() f1_1 = make_object(Blob, data=b"f1") commit_spec = [[1]] @@ -270,7 +270,7 @@ def setUp(self): self.f1_1_id = f1_1.id - def test_tagged_commit(self): + def test_tagged_commit(self) -> None: # The user already has the tagged commit, all they want is the tag, # so send them only the tag object. self.assertMissingMatch( @@ -278,7 +278,7 @@ def test_tagged_commit(self): ) # The remaining cases are unusual, but do happen in the wild. 
- def test_tagged_tag(self): + def test_tagged_tag(self) -> None: # User already has tagged tag, send only tag of tag self.assertMissingMatch( [self._normal_tag.id], [self._tag_of_tag.id], [self._tag_of_tag.id] @@ -290,19 +290,19 @@ def test_tagged_tag(self): [self._normal_tag.id, self._tag_of_tag.id], ) - def test_tagged_tree(self): + def test_tagged_tree(self) -> None: self.assertMissingMatch( [], [self._tag_of_tree.id], [self._tag_of_tree.id, self.cmt(1).tree, self.f1_1_id], ) - def test_tagged_blob(self): + def test_tagged_blob(self) -> None: self.assertMissingMatch( [], [self._tag_of_blob.id], [self._tag_of_blob.id, self.f1_1_id] ) - def test_tagged_tagged_blob(self): + def test_tagged_tagged_blob(self) -> None: self.assertMissingMatch( [], [self._tag_of_tag_of_blob.id], diff --git a/tests/test_object_store.py b/tests/test_object_store.py index 4283a26e6..a97c206aa 100644 --- a/tests/test_object_store.py +++ b/tests/test_object_store.py @@ -58,18 +58,18 @@ class OverlayObjectStoreTests(ObjectStoreTests, TestCase): - def setUp(self): + def setUp(self) -> None: TestCase.setUp(self) self.bases = [MemoryObjectStore(), MemoryObjectStore()] self.store = OverlayObjectStore(self.bases, self.bases[0]) class MemoryObjectStoreTests(ObjectStoreTests, TestCase): - def setUp(self): + def setUp(self) -> None: TestCase.setUp(self) self.store = MemoryObjectStore() - def test_add_pack(self): + def test_add_pack(self) -> None: o = MemoryObjectStore() f, commit, abort = o.add_pack() try: @@ -81,12 +81,12 @@ def test_add_pack(self): else: commit() - def test_add_pack_emtpy(self): + def test_add_pack_emtpy(self) -> None: o = MemoryObjectStore() f, commit, abort = o.add_pack() commit() - def test_add_thin_pack(self): + def test_add_thin_pack(self) -> None: o = MemoryObjectStore() blob = make_object(Blob, data=b"yummy data") o.add_object(blob) @@ -105,7 +105,7 @@ def test_add_thin_pack(self): (Blob.type_num, b"more yummy data"), o.get_raw(packed_blob_sha) ) - def test_add_thin_pack_empty(self): + def test_add_thin_pack_empty(self) -> None: o = MemoryObjectStore() f = BytesIO() @@ -115,24 +115,24 @@ def test_add_thin_pack_empty(self): class DiskObjectStoreTests(PackBasedObjectStoreTests, TestCase): - def setUp(self): + def setUp(self) -> None: TestCase.setUp(self) self.store_dir = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, self.store_dir) self.store = DiskObjectStore.init(self.store_dir) - def tearDown(self): + def tearDown(self) -> None: TestCase.tearDown(self) PackBasedObjectStoreTests.tearDown(self) - def test_loose_compression_level(self): + def test_loose_compression_level(self) -> None: alternate_dir = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, alternate_dir) alternate_store = DiskObjectStore(alternate_dir, loose_compression_level=6) b2 = make_object(Blob, data=b"yummy data") alternate_store.add_object(b2) - def test_alternates(self): + def test_alternates(self) -> None: alternate_dir = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, alternate_dir) alternate_store = DiskObjectStore(alternate_dir) @@ -144,7 +144,7 @@ def test_alternates(self): self.assertIn(b2.id, store) self.assertEqual(b2, store[b2.id]) - def test_read_alternate_paths(self): + def test_read_alternate_paths(self) -> None: store = DiskObjectStore(self.store_dir) abs_path = os.path.abspath(os.path.normpath("/abspath")) @@ -164,7 +164,7 @@ def test_read_alternate_paths(self): for alt_path in store._read_alternate_paths(): self.assertNotIn("#", alt_path) - def test_file_modes(self): + def test_file_modes(self) -> None: 
self.store.add_object(testobject) path = self.store._get_shafile_path(testobject.id) mode = os.stat(path).st_mode @@ -172,7 +172,7 @@ def test_file_modes(self): packmode = "0o100444" if sys.platform != "win32" else "0o100666" self.assertEqual(oct(mode), packmode) - def test_corrupted_object_raise_exception(self): + def test_corrupted_object_raise_exception(self) -> None: """Corrupted sha1 disk file should raise specific exception.""" self.store.add_object(testobject) self.assertEqual( @@ -202,7 +202,7 @@ def test_corrupted_object_raise_exception(self): # this does not change iteration on loose objects though self.assertEqual([testobject.id], list(self.store._iter_loose_objects())) - def test_tempfile_in_loose_store(self): + def test_tempfile_in_loose_store(self) -> None: self.store.add_object(testobject) self.assertEqual([testobject.id], list(self.store._iter_loose_objects())) @@ -216,7 +216,7 @@ def test_tempfile_in_loose_store(self): self.assertEqual([testobject.id], list(self.store._iter_loose_objects())) - def test_add_alternate_path(self): + def test_add_alternate_path(self) -> None: store = DiskObjectStore(self.store_dir) self.assertEqual([], list(store._read_alternate_paths())) store.add_alternate_path(os.path.abspath("/foo/path")) @@ -239,7 +239,7 @@ def test_add_alternate_path(self): list(store._read_alternate_paths()), ) - def test_rel_alternative_path(self): + def test_rel_alternative_path(self) -> None: alternate_dir = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, alternate_dir) alternate_store = DiskObjectStore(alternate_dir) @@ -252,11 +252,11 @@ def test_rel_alternative_path(self): self.assertIn(b2.id, store) self.assertEqual(b2, store[b2.id]) - def test_pack_dir(self): + def test_pack_dir(self) -> None: o = DiskObjectStore(self.store_dir) self.assertEqual(os.path.join(self.store_dir, "pack"), o.pack_dir) - def test_add_pack(self): + def test_add_pack(self) -> None: o = DiskObjectStore(self.store_dir) self.addCleanup(o.close) f, commit, abort = o.add_pack() @@ -269,7 +269,7 @@ def test_add_pack(self): else: commit() - def test_add_thin_pack(self): + def test_add_thin_pack(self) -> None: o = DiskObjectStore(self.store_dir) try: blob = make_object(Blob, data=b"yummy data") @@ -297,7 +297,7 @@ def test_add_thin_pack(self): finally: o.close() - def test_add_thin_pack_empty(self): + def test_add_thin_pack_empty(self) -> None: with closing(DiskObjectStore(self.store_dir)) as o: f = BytesIO() entries = build_pack(f, [], store=o) @@ -306,7 +306,7 @@ def test_add_thin_pack_empty(self): class TreeLookupPathTests(TestCase): - def setUp(self): + def setUp(self) -> None: TestCase.setUp(self) self.store = MemoryObjectStore() blob_a = make_object(Blob, data=b"a") @@ -328,11 +328,11 @@ def setUp(self): def get_object(self, sha): return self.store[sha] - def test_lookup_blob(self): + def test_lookup_blob(self) -> None: o_id = tree_lookup_path(self.get_object, self.tree_id, b"a")[1] self.assertIsInstance(self.store[o_id], Blob) - def test_lookup_tree(self): + def test_lookup_tree(self) -> None: o_id = tree_lookup_path(self.get_object, self.tree_id, b"ad")[1] self.assertIsInstance(self.store[o_id], Tree) o_id = tree_lookup_path(self.get_object, self.tree_id, b"ad/bd")[1] @@ -340,7 +340,7 @@ def test_lookup_tree(self): o_id = tree_lookup_path(self.get_object, self.tree_id, b"ad/bd/")[1] self.assertIsInstance(self.store[o_id], Tree) - def test_lookup_submodule(self): + def test_lookup_submodule(self) -> None: tree_lookup_path(self.get_object, self.tree_id, b"d")[1] self.assertRaises( 
SubmoduleEncountered, @@ -350,12 +350,12 @@ def test_lookup_submodule(self): b"d/a", ) - def test_lookup_nonexistent(self): + def test_lookup_nonexistent(self) -> None: self.assertRaises( KeyError, tree_lookup_path, self.get_object, self.tree_id, b"j" ) - def test_lookup_not_tree(self): + def test_lookup_not_tree(self) -> None: self.assertRaises( NotTreeError, tree_lookup_path, @@ -374,40 +374,40 @@ def get_walker(self, heads, parent_map): [x * 40 for x in heads], new_parent_map.__getitem__ ) - def test_ack_invalid_value(self): + def test_ack_invalid_value(self) -> None: gw = self.get_walker([], {}) self.assertRaises(ValueError, gw.ack, "tooshort") - def test_empty(self): + def test_empty(self) -> None: gw = self.get_walker([], {}) self.assertIs(None, next(gw)) gw.ack(b"a" * 40) self.assertIs(None, next(gw)) - def test_descends(self): + def test_descends(self) -> None: gw = self.get_walker([b"a"], {b"a": [b"b"], b"b": []}) self.assertEqual(b"a" * 40, next(gw)) self.assertEqual(b"b" * 40, next(gw)) - def test_present(self): + def test_present(self) -> None: gw = self.get_walker([b"a"], {b"a": [b"b"], b"b": []}) gw.ack(b"a" * 40) self.assertIs(None, next(gw)) - def test_parent_present(self): + def test_parent_present(self) -> None: gw = self.get_walker([b"a"], {b"a": [b"b"], b"b": []}) self.assertEqual(b"a" * 40, next(gw)) gw.ack(b"a" * 40) self.assertIs(None, next(gw)) - def test_child_ack_later(self): + def test_child_ack_later(self) -> None: gw = self.get_walker([b"a"], {b"a": [b"b"], b"b": [b"c"], b"c": []}) self.assertEqual(b"a" * 40, next(gw)) self.assertEqual(b"b" * 40, next(gw)) gw.ack(b"a" * 40) self.assertIs(None, next(gw)) - def test_only_once(self): + def test_only_once(self) -> None: # a b # | | # c d @@ -447,7 +447,7 @@ def test_only_once(self): class CommitTreeChangesTests(TestCase): - def setUp(self): + def setUp(self) -> None: super().setUp() self.store = MemoryObjectStore() self.blob_a = make_object(Blob, data=b"a") @@ -465,13 +465,13 @@ def setUp(self): ] self.tree_id = commit_tree(self.store, blobs) - def test_no_changes(self): + def test_no_changes(self) -> None: self.assertEqual( self.store[self.tree_id], commit_tree_changes(self.store, self.store[self.tree_id], []), ) - def test_add_blob(self): + def test_add_blob(self) -> None: blob_d = make_object(Blob, data=b"d") new_tree = commit_tree_changes( self.store, self.store[self.tree_id], [(b"d", 0o100644, blob_d.id)] @@ -481,7 +481,7 @@ def test_add_blob(self): (33188, b"c59d9b6344f1af00e504ba698129f07a34bbed8d"), ) - def test_add_blob_in_dir(self): + def test_add_blob_in_dir(self) -> None: blob_d = make_object(Blob, data=b"d") new_tree = commit_tree_changes( self.store, @@ -522,7 +522,7 @@ def test_add_blob_in_dir(self): [TreeEntry(path=b"d", mode=stat.S_IFREG | 0o100644, sha=blob_d.id)], ) - def test_delete_blob(self): + def test_delete_blob(self) -> None: new_tree = commit_tree_changes( self.store, self.store[self.tree_id], [(b"ad/bd/c", None, None)] ) @@ -532,7 +532,7 @@ def test_delete_blob(self): class TestReadPacksFile(TestCase): - def test_read_packs(self): + def test_read_packs(self) -> None: self.assertEqual( ["pack-1.pack"], list( diff --git a/tests/test_objects.py b/tests/test_objects.py index 81f2190c7..e7c65420e 100644 --- a/tests/test_objects.py +++ b/tests/test_objects.py @@ -73,10 +73,10 @@ class TestHexToSha(TestCase): - def test_simple(self): + def test_simple(self) -> None: self.assertEqual(b"\xab\xcd" * 10, hex_to_sha(b"abcd" * 10)) - def test_reverse(self): + def test_reverse(self) -> None: 
self.assertEqual(b"abcd" * 10, sha_to_hex(b"\xab\xcd" * 10)) @@ -100,46 +100,46 @@ def get_tag(self, sha): def commit(self, sha): return self.get_sha_file(Commit, "commits", sha) - def test_decompress_simple_blob(self): + def test_decompress_simple_blob(self) -> None: b = self.get_blob(a_sha) self.assertEqual(b.data, b"test 1\n") self.assertEqual(b.sha().hexdigest().encode("ascii"), a_sha) - def test_hash(self): + def test_hash(self) -> None: b = self.get_blob(a_sha) self.assertEqual(hash(b.id), hash(b)) - def test_parse_empty_blob_object(self): + def test_parse_empty_blob_object(self) -> None: sha = b"e69de29bb2d1d6434b8b29ae775ad8c2e48c5391" b = self.get_blob(sha) self.assertEqual(b.data, b"") self.assertEqual(b.id, sha) self.assertEqual(b.sha().hexdigest().encode("ascii"), sha) - def test_create_blob_from_string(self): + def test_create_blob_from_string(self) -> None: string = b"test 2\n" b = Blob.from_string(string) self.assertEqual(b.data, string) self.assertEqual(b.sha().hexdigest().encode("ascii"), b_sha) - def test_legacy_from_file(self): + def test_legacy_from_file(self) -> None: b1 = Blob.from_string(b"foo") b_raw = b1.as_legacy_object() b2 = b1.from_file(BytesIO(b_raw)) self.assertEqual(b1, b2) - def test_legacy_from_file_compression_level(self): + def test_legacy_from_file_compression_level(self) -> None: b1 = Blob.from_string(b"foo") b_raw = b1.as_legacy_object(compression_level=6) b2 = b1.from_file(BytesIO(b_raw)) self.assertEqual(b1, b2) - def test_chunks(self): + def test_chunks(self) -> None: string = b"test 5\n" b = Blob.from_string(string) self.assertEqual([string], b.chunked) - def test_splitlines(self): + def test_splitlines(self) -> None: for case in [ [], [b"foo\nbar\n"], @@ -155,7 +155,7 @@ def test_splitlines(self): b.chunked = case self.assertEqual(b.data.splitlines(True), b.splitlines()) - def test_set_chunks(self): + def test_set_chunks(self) -> None: b = Blob() b.chunked = [b"te", b"st", b" 5\n"] self.assertEqual(b"test 5\n", b.data) @@ -163,26 +163,26 @@ def test_set_chunks(self): self.assertEqual(b"test 6\n", b.as_raw_string()) self.assertEqual(b"test 6\n", bytes(b)) - def test_parse_legacy_blob(self): + def test_parse_legacy_blob(self) -> None: string = b"test 3\n" b = self.get_blob(c_sha) self.assertEqual(b.data, string) self.assertEqual(b.sha().hexdigest().encode("ascii"), c_sha) - def test_eq(self): + def test_eq(self) -> None: blob1 = self.get_blob(a_sha) blob2 = self.get_blob(a_sha) self.assertEqual(blob1, blob2) - def test_read_tree_from_file(self): + def test_read_tree_from_file(self) -> None: t = self.get_tree(tree_sha) self.assertEqual(t.items()[0], (b"a", 33188, a_sha)) self.assertEqual(t.items()[1], (b"b", 33188, b_sha)) - def test_read_tree_from_file_parse_count(self): + def test_read_tree_from_file_parse_count(self) -> None: old_deserialize = Tree._deserialize - def reset_deserialize(): + def reset_deserialize() -> None: Tree._deserialize = old_deserialize self.addCleanup(reset_deserialize) @@ -198,7 +198,7 @@ def counting_deserialize(*args, **kwargs): self.assertEqual(t.items()[1], (b"b", 33188, b_sha)) self.assertEqual(self.deserialize_count, 1) - def test_read_tag_from_file(self): + def test_read_tag_from_file(self) -> None: t = self.get_tag(tag_sha) self.assertEqual( t.object, (Commit, b"51b668fd5bf7061b7d6fa525f88803e6cfadaa51") @@ -219,7 +219,7 @@ def test_read_tag_from_file(self): b"-----END PGP SIGNATURE-----\n", ) - def test_read_commit_from_file(self): + def test_read_commit_from_file(self) -> None: sha = 
b"60dacdc733de308bb77bb76ce0fb0f9b44c9769e" c = self.commit(sha) self.assertEqual(c.tree, tree_sha) @@ -231,7 +231,7 @@ def test_read_commit_from_file(self): self.assertEqual(c.author_timezone, 0) self.assertEqual(c.message, b"Test commit\n") - def test_read_commit_no_parents(self): + def test_read_commit_no_parents(self) -> None: sha = b"0d89f20333fbb1d2f3a94da77f4981373d8f4310" c = self.commit(sha) self.assertEqual(c.tree, b"90182552c4a85a45ec2a835cadc3451bebdfe870") @@ -243,7 +243,7 @@ def test_read_commit_no_parents(self): self.assertEqual(c.author_timezone, 0) self.assertEqual(c.message, b"Test commit\n") - def test_read_commit_two_parents(self): + def test_read_commit_two_parents(self) -> None: sha = b"5dac377bdded4c9aeb8dff595f0faeebcc8498cc" c = self.commit(sha) self.assertEqual(c.tree, b"d80c186a03f423a81b39df39dc87fd269736ca86") @@ -261,7 +261,7 @@ def test_read_commit_two_parents(self): self.assertEqual(c.author_timezone, 0) self.assertEqual(c.message, b"Merge ../b\n") - def test_stub_sha(self): + def test_stub_sha(self) -> None: sha = b"5" * 40 c = make_commit(id=sha, message=b"foo") self.assertIsInstance(c, Commit) @@ -270,16 +270,16 @@ def test_stub_sha(self): class ShaFileCheckTests(TestCase): - def assertCheckFails(self, cls, data): + def assertCheckFails(self, cls, data) -> None: obj = cls() - def do_check(): + def do_check() -> None: obj.set_raw_string(data) obj.check() self.assertRaises(ObjectFormatException, do_check) - def assertCheckSucceeds(self, cls, data): + def assertCheckSucceeds(self, cls, data) -> None: obj = cls() obj.set_raw_string(data) self.assertEqual(None, obj.check()) @@ -300,7 +300,7 @@ def assertCheckSucceeds(self, cls, data): class ShaFileTests(TestCase): - def test_deflated_smaller_window_buffer(self): + def test_deflated_smaller_window_buffer(self) -> None: # zlib on some systems uses smaller buffers, # resulting in a different header. 
# See https://github.com/libgit2/libgit2/pull/464 @@ -328,17 +328,17 @@ def make_commit(self, **kwargs): attrs.update(kwargs) return make_commit(**attrs) - def test_encoding(self): + def test_encoding(self) -> None: c = self.make_commit(encoding=b"iso8859-1") self.assertIn(b"encoding iso8859-1\n", c.as_raw_string()) - def test_short_timestamp(self): + def test_short_timestamp(self) -> None: c = self.make_commit(commit_time=30) c1 = Commit() c1.set_raw_string(c.as_raw_string()) self.assertEqual(30, c1.commit_time) - def test_full_tree(self): + def test_full_tree(self) -> None: c = self.make_commit(commit_time=30) t = Tree() t.add(b"data-x", 0o644, Blob().id) @@ -348,11 +348,11 @@ def test_full_tree(self): self.assertEqual(t.id, c1.tree) self.assertEqual(c.as_raw_string(), c1.as_raw_string()) - def test_raw_length(self): + def test_raw_length(self) -> None: c = self.make_commit() self.assertEqual(len(c.as_raw_string()), c.raw_length()) - def test_simple(self): + def test_simple(self) -> None: c = self.make_commit() self.assertEqual(c.id, b"5dac377bdded4c9aeb8dff595f0faeebcc8498cc") self.assertEqual( @@ -368,21 +368,21 @@ def test_simple(self): c.as_raw_string(), ) - def test_timezone(self): + def test_timezone(self) -> None: c = self.make_commit(commit_timezone=(5 * 60)) self.assertIn(b" +0005\n", c.as_raw_string()) - def test_neg_timezone(self): + def test_neg_timezone(self) -> None: c = self.make_commit(commit_timezone=(-1 * 3600)) self.assertIn(b" -0100\n", c.as_raw_string()) - def test_deserialize(self): + def test_deserialize(self) -> None: c = self.make_commit() d = Commit() d._deserialize(c.as_raw_chunks()) self.assertEqual(c, d) - def test_serialize_gpgsig(self): + def test_serialize_gpgsig(self) -> None: commit = self.make_commit( gpgsig=b"""-----BEGIN PGP SIGNATURE----- Version: GnuPG v1 @@ -433,7 +433,7 @@ def test_serialize_gpgsig(self): commit.as_raw_string(), ) - def test_serialize_mergetag(self): + def test_serialize_mergetag(self) -> None: tag = make_object( Tag, object=(Commit, b"a38d6181ff27824c79fc7df825164a212eff6a3f"), @@ -471,7 +471,7 @@ def test_serialize_mergetag(self): commit.as_raw_string(), ) - def test_serialize_mergetags(self): + def test_serialize_mergetags(self) -> None: tag = make_object( Tag, object=(Commit, b"a38d6181ff27824c79fc7df825164a212eff6a3f"), @@ -522,7 +522,7 @@ def test_serialize_mergetags(self): commit.as_raw_string(), ) - def test_deserialize_mergetag(self): + def test_deserialize_mergetag(self) -> None: tag = make_object( Tag, object=(Commit, b"a38d6181ff27824c79fc7df825164a212eff6a3f"), @@ -539,7 +539,7 @@ def test_deserialize_mergetag(self): d._deserialize(commit.as_raw_chunks()) self.assertEqual(commit, d) - def test_deserialize_mergetags(self): + def test_deserialize_mergetags(self) -> None: tag = make_object( Tag, object=(Commit, b"a38d6181ff27824c79fc7df825164a212eff6a3f"), @@ -596,7 +596,7 @@ def make_commit_lines( def make_commit_text(self, **kwargs): return b"\n".join(self.make_commit_lines(**kwargs)) - def test_simple(self): + def test_simple(self) -> None: c = Commit.from_string(self.make_commit_text()) self.assertEqual(b"Merge ../b\n", c.message) self.assertEqual(b"James Westby ", c.author) @@ -620,15 +620,15 @@ def test_simple(self): self.assertEqual(0, c.author_timezone) self.assertEqual(None, c.encoding) - def test_custom(self): + def test_custom(self) -> None: c = Commit.from_string(self.make_commit_text(extra={b"extra-field": b"data"})) self.assertEqual([(b"extra-field", b"data")], c._extra) - def test_encoding(self): + def 
test_encoding(self) -> None: c = Commit.from_string(self.make_commit_text(encoding=b"UTF-8")) self.assertEqual(b"UTF-8", c.encoding) - def test_check(self): + def test_check(self) -> None: self.assertCheckSucceeds(Commit, self.make_commit_text()) self.assertCheckSucceeds(Commit, self.make_commit_text(parents=None)) self.assertCheckSucceeds(Commit, self.make_commit_text(encoding=b"UTF-8")) @@ -644,7 +644,7 @@ def test_check(self): Commit, self.make_commit_text(author=None, committer=None) ) - def test_check_duplicates(self): + def test_check_duplicates(self) -> None: # duplicate each of the header fields for i in range(5): lines = self.make_commit_lines(parents=[a_sha], encoding=b"UTF-8") @@ -656,7 +656,7 @@ def test_check_duplicates(self): else: self.assertCheckFails(Commit, text) - def test_check_order(self): + def test_check_order(self) -> None: lines = self.make_commit_lines(parents=[a_sha], encoding=b"UTF-8") headers = lines[:5] rest = lines[5:] @@ -669,7 +669,7 @@ def test_check_order(self): else: self.assertCheckFails(Commit, text) - def test_check_commit_with_unparseable_time(self): + def test_check_commit_with_unparseable_time(self) -> None: identity_with_wrong_time = ( b"Igor Sysoev 18446743887488505614+42707004" ) @@ -688,7 +688,7 @@ def test_check_commit_with_unparseable_time(self): ), ) - def test_check_commit_with_overflow_date(self): + def test_check_commit_with_overflow_date(self) -> None: """Date with overflow should raise an ObjectFormatException when checked.""" identity_with_wrong_time = ( b"Igor Sysoev 18446743887488505614 +42707004" @@ -709,7 +709,7 @@ def test_check_commit_with_overflow_date(self): with self.assertRaises(ObjectFormatException): commit.check() - def test_mangled_author_line(self): + def test_mangled_author_line(self) -> None: """Mangled author line should successfully parse.""" author_line = ( b'Karl MacMillan <"Karl MacMillan ' @@ -728,7 +728,7 @@ def test_mangled_author_line(self): with self.assertRaises(ObjectFormatException): commit.check() - def test_parse_gpgsig(self): + def test_parse_gpgsig(self) -> None: c = Commit.from_string( b"""tree aaff74984cccd156a469afa7d9ab10e4777beb24 author Jelmer Vernooij 1412179807 +0200 @@ -777,7 +777,7 @@ def test_parse_gpgsig(self): c.gpgsig, ) - def test_parse_header_trailing_newline(self): + def test_parse_header_trailing_newline(self) -> None: c = Commit.from_string( b"""\ tree a7d6277f78d3ecd0230a1a5df6db00b1d9c521ac @@ -852,40 +852,40 @@ def test_parse_header_trailing_newline(self): class TreeTests(ShaFileCheckTests): - def test_add(self): + def test_add(self) -> None: myhexsha = b"d80c186a03f423a81b39df39dc87fd269736ca86" x = Tree() x.add(b"myname", 0o100755, myhexsha) self.assertEqual(x[b"myname"], (0o100755, myhexsha)) self.assertEqual(b"100755 myname\0" + hex_to_sha(myhexsha), x.as_raw_string()) - def test_simple(self): + def test_simple(self) -> None: myhexsha = b"d80c186a03f423a81b39df39dc87fd269736ca86" x = Tree() x[b"myname"] = (0o100755, myhexsha) self.assertEqual(b"100755 myname\0" + hex_to_sha(myhexsha), x.as_raw_string()) self.assertEqual(b"100755 myname\0" + hex_to_sha(myhexsha), bytes(x)) - def test_tree_update_id(self): + def test_tree_update_id(self) -> None: x = Tree() x[b"a.c"] = (0o100755, b"d80c186a03f423a81b39df39dc87fd269736ca86") self.assertEqual(b"0c5c6bc2c081accfbc250331b19e43b904ab9cdd", x.id) x[b"a.b"] = (stat.S_IFDIR, b"d80c186a03f423a81b39df39dc87fd269736ca86") self.assertEqual(b"07bfcb5f3ada15bbebdfa3bbb8fd858a363925c8", x.id) - def test_tree_iteritems_dir_sort(self): + def 
test_tree_iteritems_dir_sort(self) -> None:
         x = Tree()
         for name, item in _TREE_ITEMS.items():
             x[name] = item
         self.assertEqual(_SORTED_TREE_ITEMS, x.items())

-    def test_tree_items_dir_sort(self):
+    def test_tree_items_dir_sort(self) -> None:
         x = Tree()
         for name, item in _TREE_ITEMS.items():
             x[name] = item
         self.assertEqual(_SORTED_TREE_ITEMS, x.items())

-    def _do_test_parse_tree(self, parse_tree):
+    def _do_test_parse_tree(self, parse_tree) -> None:
         dir = os.path.join(os.path.dirname(__file__), "..", "testdata", "trees")
         o = Tree.from_path(hex_to_filename(dir, tree_sha))
         self.assertEqual(
@@ -908,7 +908,7 @@ def eval_parse_tree(*args, **kwargs):
         _do_test_parse_tree, _parse_tree_rs
     )

-    def _do_test_sorted_tree_items(self, sorted_tree_items):
+    def _do_test_sorted_tree_items(self, sorted_tree_items) -> None:
         def do_sort(entries, name_order):
             return list(sorted_tree_items(entries, name_order))

@@ -946,7 +946,7 @@ def do_sort(entries, name_order):
         _do_test_sorted_tree_items, _sorted_tree_items_rs
     )

-    def _do_test_sorted_tree_items_name_order(self, sorted_tree_items):
+    def _do_test_sorted_tree_items_name_order(self, sorted_tree_items) -> None:
         self.assertEqual(
             [
                 TreeEntry(
@@ -986,7 +986,7 @@ def _do_test_sorted_tree_items_name_order(self, sorted_tree_items):
         _do_test_sorted_tree_items_name_order, _sorted_tree_items_rs
     )

-    def test_check(self):
+    def test_check(self) -> None:
         t = Tree
         sha = hex_to_sha(a_sha)

@@ -1020,14 +1020,14 @@ def test_check(self):
         self.assertCheckFails(t, b"100644 a\0" + sha + b"100755 a\0" + sha2)
         self.assertCheckFails(t, b"100644 b\0" + sha2 + b"100644 a\0" + sha)

-    def test_iter(self):
+    def test_iter(self) -> None:
         t = Tree()
         t[b"foo"] = (0o100644, a_sha)
         self.assertEqual({b"foo"}, set(t))


 class TagSerializeTests(TestCase):
-    def test_serialize_simple(self):
+    def test_serialize_simple(self) -> None:
         x = make_object(
             Tag,
             tagger=b"Jelmer Vernooij <jelmer@samba.org>",
@@ -1050,7 +1050,7 @@ def test_serialize_simple(self):
             x.as_raw_string(),
         )

-    def test_serialize_none_message(self):
+    def test_serialize_none_message(self) -> None:
         x = make_object(
             Tag,
             tagger=b"Jelmer Vernooij <jelmer@samba.org>",
@@ -1112,7 +1112,7 @@ def make_tag_lines(
     def make_tag_text(self, **kwargs):
         return b"\n".join(self.make_tag_lines(**kwargs))

-    def test_parse(self):
+    def test_parse(self) -> None:
         x = Tag()
         x.set_raw_string(self.make_tag_text())
         self.assertEqual(
@@ -1128,14 +1128,14 @@ def test_parse(self):
         )
         self.assertEqual(-25200, x.tag_timezone)

-    def test_parse_no_tagger(self):
+    def test_parse_no_tagger(self) -> None:
         x = Tag()
         x.set_raw_string(self.make_tag_text(tagger=None))
         self.assertEqual(None, x.tagger)
         self.assertEqual(b"v2.6.22-rc7", x.name)
         self.assertEqual(None, x.tag_time)

-    def test_parse_no_message(self):
+    def test_parse_no_message(self) -> None:
         x = Tag()
         x.set_raw_string(self.make_tag_text(message=None))
         self.assertEqual(None, x.message)
@@ -1149,7 +1149,7 @@ def test_parse_no_message(self):
         self.assertEqual(-25200, x.tag_timezone)
         self.assertEqual(b"v2.6.22-rc7", x.name)

-    def test_check(self):
+    def test_check(self) -> None:
         self.assertCheckSucceeds(Tag, self.make_tag_text())
         self.assertCheckFails(Tag, self.make_tag_text(object_sha=None))
         self.assertCheckFails(Tag, self.make_tag_text(object_type_name=None))
@@ -1173,7 +1173,7 @@ def test_check(self):
         )
         self.assertCheckFails(Tag, self.make_tag_text(object_sha=b"xxx"))

-    def test_check_tag_with_unparseable_field(self):
+    def test_check_tag_with_unparseable_field(self) -> None:
         self.assertCheckFails(
             Tag,
             self.make_tag_text(
@@ -1184,21 +1184,21 @@ def test_check_tag_with_unparseable_field(self):
             ),
         )

-    def test_check_tag_with_overflow_time(self):
+    def test_check_tag_with_overflow_time(self) -> None:
         """Date with overflow should raise an ObjectFormatException when checked."""
         author = f"Some Dude {MAX_TIME + 1} +0000"
         tag = Tag.from_string(self.make_tag_text(tagger=(author.encode())))
         with self.assertRaises(ObjectFormatException):
             tag.check()

-    def test_check_duplicates(self):
+    def test_check_duplicates(self) -> None:
         # duplicate each of the header fields
         for i in range(4):
             lines = self.make_tag_lines()
             lines.insert(i, lines[i])
             self.assertCheckFails(Tag, b"\n".join(lines))

-    def test_check_order(self):
+    def test_check_order(self) -> None:
         lines = self.make_tag_lines()
         headers = lines[:4]
         rest = lines[4:]
@@ -1211,7 +1211,7 @@ def test_check_order(self):
             else:
                 self.assertCheckFails(Tag, text)

-    def test_tree_copy_after_update(self):
+    def test_tree_copy_after_update(self) -> None:
         """Check Tree.id is correctly updated when the tree is copied after updated."""
         shas = []
         tree = Tree()
@@ -1226,7 +1226,7 @@ def test_tree_copy_after_update(self):


 class CheckTests(TestCase):
-    def test_check_hexsha(self):
+    def test_check_hexsha(self) -> None:
         check_hexsha(a_sha, "failed to check good sha")
         self.assertRaises(
             ObjectFormatException, check_hexsha, b"1" * 39, "sha too short"
@@ -1241,7 +1241,7 @@ def test_check_hexsha(self):
             "invalid characters",
         )

-    def test_check_identity(self):
+    def test_check_identity(self) -> None:
         check_identity(
             b"Dave Borowitz <dborowitz@google.com>",
             "failed to check good identity",
@@ -1319,46 +1319,46 @@ def test_check_identity(self):


 class TimezoneTests(TestCase):
-    def test_parse_timezone_utc(self):
+    def test_parse_timezone_utc(self) -> None:
         self.assertEqual((0, False), parse_timezone(b"+0000"))

-    def test_parse_timezone_utc_negative(self):
+    def test_parse_timezone_utc_negative(self) -> None:
         self.assertEqual((0, True), parse_timezone(b"-0000"))

-    def test_generate_timezone_utc(self):
+    def test_generate_timezone_utc(self) -> None:
         self.assertEqual(b"+0000", format_timezone(0))

-    def test_generate_timezone_utc_negative(self):
+    def test_generate_timezone_utc_negative(self) -> None:
         self.assertEqual(b"-0000", format_timezone(0, True))

-    def test_parse_timezone_cet(self):
+    def test_parse_timezone_cet(self) -> None:
         self.assertEqual((60 * 60, False), parse_timezone(b"+0100"))

-    def test_format_timezone_cet(self):
+    def test_format_timezone_cet(self) -> None:
         self.assertEqual(b"+0100", format_timezone(60 * 60))

-    def test_format_timezone_pdt(self):
+    def test_format_timezone_pdt(self) -> None:
         self.assertEqual(b"-0400", format_timezone(-4 * 60 * 60))

-    def test_parse_timezone_pdt(self):
+    def test_parse_timezone_pdt(self) -> None:
         self.assertEqual((-4 * 60 * 60, False), parse_timezone(b"-0400"))

-    def test_format_timezone_pdt_half(self):
+    def test_format_timezone_pdt_half(self) -> None:
         self.assertEqual(b"-0440", format_timezone(int(((-4 * 60) - 40) * 60)))

-    def test_format_timezone_double_negative(self):
+    def test_format_timezone_double_negative(self) -> None:
         self.assertEqual(b"--700", format_timezone(int((7 * 60) * 60), True))

-    def test_parse_timezone_pdt_half(self):
+    def test_parse_timezone_pdt_half(self) -> None:
         self.assertEqual((((-4 * 60) - 40) * 60, False), parse_timezone(b"-0440"))

-    def test_parse_timezone_double_negative(self):
+    def test_parse_timezone_double_negative(self) -> None:
         self.assertEqual((int((7 * 60) * 60), False), parse_timezone(b"+700"))
         self.assertEqual((int((7 * 60) * 60), True),
parse_timezone(b"--700")) class ShaFileCopyTests(TestCase): - def assert_copy(self, orig): + def assert_copy(self, orig) -> None: oclass = object_class(orig.type_num) copy = orig.copy() @@ -1366,7 +1366,7 @@ def assert_copy(self, orig): self.assertEqual(copy, orig) self.assertIsNot(copy, orig) - def test_commit_copy(self): + def test_commit_copy(self) -> None: attrs = { "tree": b"d80c186a03f423a81b39df39dc87fd269736ca86", "parents": [ @@ -1384,17 +1384,17 @@ def test_commit_copy(self): commit = make_commit(**attrs) self.assert_copy(commit) - def test_blob_copy(self): + def test_blob_copy(self) -> None: blob = make_object(Blob, data=b"i am a blob") self.assert_copy(blob) - def test_tree_copy(self): + def test_tree_copy(self) -> None: blob = make_object(Blob, data=b"i am a blob") tree = Tree() tree[b"blob"] = (stat.S_IFREG, blob.id) self.assert_copy(tree) - def test_tag_copy(self): + def test_tag_copy(self) -> None: tag = make_object( Tag, name=b"tag", @@ -1427,7 +1427,7 @@ def assert_serialization_on_change( self.assertFalse(obj._needs_serialization) self.assertNotEqual(old_id, new_id) - def test_commit_serialize(self): + def test_commit_serialize(self) -> None: attrs = { "tree": b"d80c186a03f423a81b39df39dc87fd269736ca86", "parents": [ @@ -1447,7 +1447,7 @@ def test_commit_serialize(self): with self.assert_serialization_on_change(commit): commit.parents = [b"ab64bbdcc51b170d21588e5c5d391ee5c0c96dfd"] - def test_blob_serialize(self): + def test_blob_serialize(self) -> None: blob = make_object(Blob, data=b"i am a blob") with self.assert_serialization_on_change( @@ -1455,7 +1455,7 @@ def test_blob_serialize(self): ): blob.data = b"i am another blob" - def test_tree_serialize(self): + def test_tree_serialize(self) -> None: blob = make_object(Blob, data=b"i am a blob") tree = Tree() tree[b"blob"] = (stat.S_IFREG, blob.id) @@ -1463,7 +1463,7 @@ def test_tree_serialize(self): with self.assert_serialization_on_change(tree): tree[b"blob2"] = (stat.S_IFREG, blob.id) - def test_tag_serialize(self): + def test_tag_serialize(self) -> None: tag = make_object( Tag, name=b"tag", @@ -1477,7 +1477,7 @@ def test_tag_serialize(self): with self.assert_serialization_on_change(tag): tag.message = b"new message" - def test_tag_serialize_time_error(self): + def test_tag_serialize_time_error(self) -> None: with self.assertRaises(ObjectFormatException): tag = make_object( Tag, @@ -1490,7 +1490,7 @@ def test_tag_serialize_time_error(self): class PrettyFormatTreeEntryTests(TestCase): - def test_format(self): + def test_format(self) -> None: self.assertEqual( "40000 tree 40820c38cfb182ce6c8b261555410d8382a5918b\tfoo\n", pretty_format_tree_entry( diff --git a/tests/test_objectspec.py b/tests/test_objectspec.py index d21e3eee4..b51c613de 100644 --- a/tests/test_objectspec.py +++ b/tests/test_objectspec.py @@ -42,11 +42,11 @@ class ParseObjectTests(TestCase): """Test parse_object.""" - def test_nonexistent(self): + def test_nonexistent(self) -> None: r = MemoryRepo() self.assertRaises(KeyError, parse_object, r, "thisdoesnotexist") - def test_blob_by_sha(self): + def test_blob_by_sha(self) -> None: r = MemoryRepo() b = Blob.from_string(b"Blah") r.object_store.add_object(b) @@ -56,11 +56,11 @@ def test_blob_by_sha(self): class ParseCommitRangeTests(TestCase): """Test parse_commit_range.""" - def test_nonexistent(self): + def test_nonexistent(self) -> None: r = MemoryRepo() self.assertRaises(KeyError, parse_commit_range, r, "thisdoesnotexist") - def test_commit_by_sha(self): + def test_commit_by_sha(self) -> None: r = 
MemoryRepo() c1, c2, c3 = build_commit_graph(r.object_store, [[1], [2, 1], [3, 1, 2]]) self.assertEqual([c1], list(parse_commit_range(r, c1.id))) @@ -69,27 +69,27 @@ def test_commit_by_sha(self): class ParseCommitTests(TestCase): """Test parse_commit.""" - def test_nonexistent(self): + def test_nonexistent(self) -> None: r = MemoryRepo() self.assertRaises(KeyError, parse_commit, r, "thisdoesnotexist") - def test_commit_by_sha(self): + def test_commit_by_sha(self) -> None: r = MemoryRepo() [c1] = build_commit_graph(r.object_store, [[1]]) self.assertEqual(c1, parse_commit(r, c1.id)) - def test_commit_by_short_sha(self): + def test_commit_by_short_sha(self) -> None: r = MemoryRepo() [c1] = build_commit_graph(r.object_store, [[1]]) self.assertEqual(c1, parse_commit(r, c1.id[:10])) class ParseRefTests(TestCase): - def test_nonexistent(self): + def test_nonexistent(self) -> None: r = {} self.assertRaises(KeyError, parse_ref, r, b"thisdoesnotexist") - def test_ambiguous_ref(self): + def test_ambiguous_ref(self) -> None: r = { b"ambig1": "bla", b"refs/ambig1": "bla", @@ -100,7 +100,7 @@ def test_ambiguous_ref(self): } self.assertEqual(b"ambig1", parse_ref(r, b"ambig1")) - def test_ambiguous_ref2(self): + def test_ambiguous_ref2(self) -> None: r = { b"refs/ambig2": "bla", b"refs/tags/ambig2": "bla", @@ -110,7 +110,7 @@ def test_ambiguous_ref2(self): } self.assertEqual(b"refs/ambig2", parse_ref(r, b"ambig2")) - def test_ambiguous_tag(self): + def test_ambiguous_tag(self) -> None: r = { b"refs/tags/ambig3": "bla", b"refs/heads/ambig3": "bla", @@ -119,7 +119,7 @@ def test_ambiguous_tag(self): } self.assertEqual(b"refs/tags/ambig3", parse_ref(r, b"ambig3")) - def test_ambiguous_head(self): + def test_ambiguous_head(self) -> None: r = { b"refs/heads/ambig4": "bla", b"refs/remotes/ambig4": "bla", @@ -127,47 +127,47 @@ def test_ambiguous_head(self): } self.assertEqual(b"refs/heads/ambig4", parse_ref(r, b"ambig4")) - def test_ambiguous_remote(self): + def test_ambiguous_remote(self) -> None: r = {b"refs/remotes/ambig5": "bla", b"refs/remotes/ambig5/HEAD": "bla"} self.assertEqual(b"refs/remotes/ambig5", parse_ref(r, b"ambig5")) - def test_ambiguous_remote_head(self): + def test_ambiguous_remote_head(self) -> None: r = {b"refs/remotes/ambig6/HEAD": "bla"} self.assertEqual(b"refs/remotes/ambig6/HEAD", parse_ref(r, b"ambig6")) - def test_heads_full(self): + def test_heads_full(self) -> None: r = {b"refs/heads/foo": "bla"} self.assertEqual(b"refs/heads/foo", parse_ref(r, b"refs/heads/foo")) - def test_heads_partial(self): + def test_heads_partial(self) -> None: r = {b"refs/heads/foo": "bla"} self.assertEqual(b"refs/heads/foo", parse_ref(r, b"heads/foo")) - def test_tags_partial(self): + def test_tags_partial(self) -> None: r = {b"refs/tags/foo": "bla"} self.assertEqual(b"refs/tags/foo", parse_ref(r, b"tags/foo")) class ParseRefsTests(TestCase): - def test_nonexistent(self): + def test_nonexistent(self) -> None: r = {} self.assertRaises(KeyError, parse_refs, r, [b"thisdoesnotexist"]) - def test_head(self): + def test_head(self) -> None: r = {b"refs/heads/foo": "bla"} self.assertEqual([b"refs/heads/foo"], parse_refs(r, [b"foo"])) - def test_full(self): + def test_full(self) -> None: r = {b"refs/heads/foo": "bla"} self.assertEqual([b"refs/heads/foo"], parse_refs(r, b"refs/heads/foo")) class ParseReftupleTests(TestCase): - def test_nonexistent(self): + def test_nonexistent(self) -> None: r = {} self.assertRaises(KeyError, parse_reftuple, r, r, b"thisdoesnotexist") - def test_head(self): + def test_head(self) -> 
None: r = {b"refs/heads/foo": "bla"} self.assertEqual( (b"refs/heads/foo", b"refs/heads/foo", False), @@ -186,28 +186,28 @@ def test_head(self): parse_reftuple(r, {}, b"foo", True), ) - def test_full(self): + def test_full(self) -> None: r = {b"refs/heads/foo": "bla"} self.assertEqual( (b"refs/heads/foo", b"refs/heads/foo", False), parse_reftuple(r, r, b"refs/heads/foo"), ) - def test_no_left_ref(self): + def test_no_left_ref(self) -> None: r = {b"refs/heads/foo": "bla"} self.assertEqual( (None, b"refs/heads/foo", False), parse_reftuple(r, r, b":refs/heads/foo"), ) - def test_no_right_ref(self): + def test_no_right_ref(self) -> None: r = {b"refs/heads/foo": "bla"} self.assertEqual( (b"refs/heads/foo", None, False), parse_reftuple(r, r, b"refs/heads/foo:"), ) - def test_default_with_string(self): + def test_default_with_string(self) -> None: r = {b"refs/heads/foo": "bla"} self.assertEqual( (b"refs/heads/foo", b"refs/heads/foo", False), @@ -216,18 +216,18 @@ def test_default_with_string(self): class ParseReftuplesTests(TestCase): - def test_nonexistent(self): + def test_nonexistent(self) -> None: r = {} self.assertRaises(KeyError, parse_reftuples, r, r, [b"thisdoesnotexist"]) - def test_head(self): + def test_head(self) -> None: r = {b"refs/heads/foo": "bla"} self.assertEqual( [(b"refs/heads/foo", b"refs/heads/foo", False)], parse_reftuples(r, r, [b"foo"]), ) - def test_full(self): + def test_full(self) -> None: r = {b"refs/heads/foo": "bla"} self.assertEqual( [(b"refs/heads/foo", b"refs/heads/foo", False)], @@ -243,17 +243,17 @@ def test_full(self): class ParseTreeTests(TestCase): """Test parse_tree.""" - def test_nonexistent(self): + def test_nonexistent(self) -> None: r = MemoryRepo() self.assertRaises(KeyError, parse_tree, r, "thisdoesnotexist") - def test_from_commit(self): + def test_from_commit(self) -> None: r = MemoryRepo() c1, c2, c3 = build_commit_graph(r.object_store, [[1], [2, 1], [3, 1, 2]]) self.assertEqual(r[c1.tree], parse_tree(r, c1.id)) self.assertEqual(r[c1.tree], parse_tree(r, c1.tree)) - def test_from_ref(self): + def test_from_ref(self) -> None: r = MemoryRepo() c1, c2, c3 = build_commit_graph(r.object_store, [[1], [2, 1], [3, 1, 2]]) r.refs[b"refs/heads/foo"] = c1.id diff --git a/tests/test_pack.py b/tests/test_pack.py index a2ec82adf..b07c42300 100644 --- a/tests/test_pack.py +++ b/tests/test_pack.py @@ -28,6 +28,7 @@ import zlib from hashlib import sha1 from io import BytesIO +from typing import NoReturn from dulwich.errors import ApplyDeltaError, ChecksumMismatch from dulwich.file import GitFile @@ -73,7 +74,7 @@ class PackTests(TestCase): """Base class for testing packs.""" - def setUp(self): + def setUp(self) -> None: super().setUp() self.tempdir = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, self.tempdir) @@ -97,7 +98,7 @@ def get_pack_data(self, sha): def get_pack(self, sha): return Pack(os.path.join(self.datadir, "pack-{}".format(sha.decode("ascii")))) - def assertSucceeds(self, func, *args, **kwargs): + def assertSucceeds(self, func, *args, **kwargs) -> None: try: func(*args, **kwargs) except ChecksumMismatch as e: @@ -107,7 +108,7 @@ def assertSucceeds(self, func, *args, **kwargs): class PackIndexTests(PackTests): """Class that tests the index of packfiles.""" - def test_object_offset(self): + def test_object_offset(self) -> None: """Tests that the correct object offset is returned from the index.""" p = self.get_pack_index(pack1_sha) self.assertRaises(KeyError, p.object_offset, pack1_sha) @@ -115,7 +116,7 @@ def test_object_offset(self): 
self.assertEqual(p.object_offset(tree_sha), 138) self.assertEqual(p.object_offset(commit_sha), 12) - def test_object_sha1(self): + def test_object_sha1(self) -> None: """Tests that the correct object offset is returned from the index.""" p = self.get_pack_index(pack1_sha) self.assertRaises(KeyError, p.object_sha1, 876) @@ -123,7 +124,7 @@ def test_object_sha1(self): self.assertEqual(p.object_sha1(138), hex_to_sha(tree_sha)) self.assertEqual(p.object_sha1(12), hex_to_sha(commit_sha)) - def test_iter_prefix(self): + def test_iter_prefix(self) -> None: p = self.get_pack_index(pack1_sha) self.assertEqual([p.object_sha1(178)], list(p.iter_prefix(hex_to_sha(a_sha)))) self.assertEqual( @@ -133,11 +134,11 @@ def test_iter_prefix(self): [p.object_sha1(178)], list(p.iter_prefix(hex_to_sha(a_sha)[:2])) ) - def test_index_len(self): + def test_index_len(self) -> None: p = self.get_pack_index(pack1_sha) self.assertEqual(3, len(p)) - def test_get_stored_checksum(self): + def test_get_stored_checksum(self) -> None: p = self.get_pack_index(pack1_sha) self.assertEqual( b"f2848e2ad16f329ae1c92e3b95e91888daa5bd01", @@ -148,11 +149,11 @@ def test_get_stored_checksum(self): sha_to_hex(p.get_pack_checksum()), ) - def test_index_check(self): + def test_index_check(self) -> None: p = self.get_pack_index(pack1_sha) self.assertSucceeds(p.check) - def test_iterentries(self): + def test_iterentries(self) -> None: p = self.get_pack_index(pack1_sha) entries = [(sha_to_hex(s), o, c) for s, o, c in p.iterentries()] self.assertEqual( @@ -164,7 +165,7 @@ def test_iterentries(self): entries, ) - def test_iter(self): + def test_iter(self) -> None: p = self.get_pack_index(pack1_sha) self.assertEqual({tree_sha, commit_sha, a_sha}, set(p)) @@ -178,36 +179,36 @@ class TestPackDeltas(TestCase): test_string_big = b"Z" * 8192 test_string_huge = b"Z" * 100000 - def _test_roundtrip(self, base, target): + def _test_roundtrip(self, base, target) -> None: self.assertEqual( target, b"".join(apply_delta(base, list(create_delta(base, target)))) ) - def test_nochange(self): + def test_nochange(self) -> None: self._test_roundtrip(self.test_string1, self.test_string1) - def test_nochange_huge(self): + def test_nochange_huge(self) -> None: self._test_roundtrip(self.test_string_huge, self.test_string_huge) - def test_change(self): + def test_change(self) -> None: self._test_roundtrip(self.test_string1, self.test_string2) - def test_rewrite(self): + def test_rewrite(self) -> None: self._test_roundtrip(self.test_string1, self.test_string3) - def test_empty_to_big(self): + def test_empty_to_big(self) -> None: self._test_roundtrip(self.test_string_empty, self.test_string_big) - def test_empty_to_huge(self): + def test_empty_to_huge(self) -> None: self._test_roundtrip(self.test_string_empty, self.test_string_huge) - def test_huge_copy(self): + def test_huge_copy(self) -> None: self._test_roundtrip( self.test_string_huge + self.test_string1, self.test_string_huge + self.test_string2, ) - def test_dest_overflow(self): + def test_dest_overflow(self) -> None: self.assertRaises( ApplyDeltaError, apply_delta, @@ -218,7 +219,7 @@ def test_dest_overflow(self): ApplyDeltaError, apply_delta, b"", b"\x00\x80\x02\xb0\x11\x11" ) - def test_pypy_issue(self): + def test_pypy_issue(self) -> None: # Test for https://github.com/jelmer/dulwich/issues/509 / # https://bitbucket.org/pypy/pypy/issues/2499/cpyext-pystring_asstring-doesnt-work chunks = [ @@ -261,25 +262,25 @@ def test_pypy_issue(self): class TestPackData(PackTests): """Tests getting the data from the 
packfile.""" - def test_create_pack(self): + def test_create_pack(self) -> None: self.get_pack_data(pack1_sha).close() - def test_from_file(self): + def test_from_file(self) -> None: path = os.path.join( self.datadir, "pack-{}.pack".format(pack1_sha.decode("ascii")) ) with open(path, "rb") as f: PackData.from_file(f, os.path.getsize(path)) - def test_pack_len(self): + def test_pack_len(self) -> None: with self.get_pack_data(pack1_sha) as p: self.assertEqual(3, len(p)) - def test_index_check(self): + def test_index_check(self) -> None: with self.get_pack_data(pack1_sha) as p: self.assertSucceeds(p.check) - def test_iter_unpacked(self): + def test_iter_unpacked(self) -> None: with self.get_pack_data(pack1_sha) as p: commit_data = ( b"tree b2a2766a2879c209ab1176e7e778b81ae422eeaa\n" @@ -317,7 +318,7 @@ def test_iter_unpacked(self): actual, ) - def test_iterentries(self): + def test_iterentries(self) -> None: with self.get_pack_data(pack1_sha) as p: entries = {(sha_to_hex(s), o, c) for s, o, c in p.iterentries()} self.assertEqual( @@ -341,7 +342,7 @@ def test_iterentries(self): entries, ) - def test_create_index_v1(self): + def test_create_index_v1(self) -> None: with self.get_pack_data(pack1_sha) as p: filename = os.path.join(self.tempdir, "v1test.idx") p.create_index_v1(filename) @@ -350,7 +351,7 @@ def test_create_index_v1(self): self.assertEqual(oct(os.stat(filename).st_mode), indexmode) self.assertEqual(idx1, idx2) - def test_create_index_v2(self): + def test_create_index_v2(self) -> None: with self.get_pack_data(pack1_sha) as p: filename = os.path.join(self.tempdir, "v2test.idx") p.create_index_v2(filename) @@ -359,7 +360,7 @@ def test_create_index_v2(self): self.assertEqual(oct(os.stat(filename).st_mode), indexmode) self.assertEqual(idx1, idx2) - def test_compute_file_sha(self): + def test_compute_file_sha(self) -> None: f = BytesIO(b"abcd1234wxyz") self.assertEqual( sha1(b"abcd1234wxyz").hexdigest(), compute_file_sha(f).hexdigest() @@ -381,7 +382,7 @@ def test_compute_file_sha(self): compute_file_sha(f, start_ofs=4, end_ofs=-4).hexdigest(), ) - def test_compute_file_sha_short_file(self): + def test_compute_file_sha_short_file(self) -> None: f = BytesIO(b"abcd1234wxyz") self.assertRaises(AssertionError, compute_file_sha, f, end_ofs=-20) self.assertRaises(AssertionError, compute_file_sha, f, end_ofs=20) @@ -391,28 +392,28 @@ def test_compute_file_sha_short_file(self): class TestPack(PackTests): - def test_len(self): + def test_len(self) -> None: with self.get_pack(pack1_sha) as p: self.assertEqual(3, len(p)) - def test_contains(self): + def test_contains(self) -> None: with self.get_pack(pack1_sha) as p: self.assertIn(tree_sha, p) - def test_get(self): + def test_get(self) -> None: with self.get_pack(pack1_sha) as p: self.assertEqual(type(p[tree_sha]), Tree) - def test_iter(self): + def test_iter(self) -> None: with self.get_pack(pack1_sha) as p: self.assertEqual({tree_sha, commit_sha, a_sha}, set(p)) - def test_iterobjects(self): + def test_iterobjects(self) -> None: with self.get_pack(pack1_sha) as p: expected = {p[s] for s in [commit_sha, tree_sha, a_sha]} self.assertEqual(expected, set(list(p.iterobjects()))) - def test_pack_tuples(self): + def test_pack_tuples(self) -> None: with self.get_pack(pack1_sha) as p: tuples = p.pack_tuples() expected = {(p[s], None) for s in [commit_sha, tree_sha, a_sha]} @@ -420,7 +421,7 @@ def test_pack_tuples(self): self.assertEqual(expected, set(list(tuples))) self.assertEqual(3, len(tuples)) - def test_get_object_at(self): + def 
test_get_object_at(self) -> None: """Tests random access for non-delta objects.""" with self.get_pack(pack1_sha) as p: obj = p[a_sha] @@ -433,7 +434,7 @@ def test_get_object_at(self): self.assertEqual(obj.type_name, b"commit") self.assertEqual(obj.sha().hexdigest().encode("ascii"), commit_sha) - def test_copy(self): + def test_copy(self) -> None: with self.get_pack(pack1_sha) as origpack: self.assertSucceeds(origpack.index.check) basename = os.path.join(self.tempdir, "Elch") @@ -453,7 +454,7 @@ def test_copy(self): new_checksum = newpack.index.get_stored_checksum() self.assertTrue(wrong_version or orig_checksum == new_checksum) - def test_commit_obj(self): + def test_commit_obj(self) -> None: with self.get_pack(pack1_sha) as p: commit = p[commit_sha] self.assertEqual(b"James Westby ", commit.author) @@ -464,7 +465,7 @@ def _copy_pack(self, origpack): write_pack(basename, origpack.pack_tuples()) return Pack(basename) - def test_keep_no_message(self): + def test_keep_no_message(self) -> None: with self.get_pack(pack1_sha) as p: p = self._copy_pack(p) @@ -478,7 +479,7 @@ def test_keep_no_message(self): buf = f.read() self.assertEqual("", buf) - def test_keep_message(self): + def test_keep_message(self) -> None: with self.get_pack(pack1_sha) as p: p = self._copy_pack(p) @@ -494,11 +495,11 @@ def test_keep_message(self): buf = f.read() self.assertEqual(msg + b"\n", buf) - def test_name(self): + def test_name(self) -> None: with self.get_pack(pack1_sha) as p: self.assertEqual(pack1_sha, p.name()) - def test_length_mismatch(self): + def test_length_mismatch(self) -> None: with self.get_pack_data(pack1_sha) as data: index = self.get_pack_index(pack1_sha) Pack.from_objects(data, index).check_length_and_checksum() @@ -513,7 +514,7 @@ def test_length_mismatch(self): self.assertRaises(AssertionError, lambda: bad_pack.data) self.assertRaises(AssertionError, bad_pack.check_length_and_checksum) - def test_checksum_mismatch(self): + def test_checksum_mismatch(self) -> None: with self.get_pack_data(pack1_sha) as data: index = self.get_pack_index(pack1_sha) Pack.from_objects(data, index).check_length_and_checksum() @@ -525,7 +526,7 @@ def test_checksum_mismatch(self): self.assertRaises(ChecksumMismatch, lambda: bad_pack.data) self.assertRaises(ChecksumMismatch, bad_pack.check_length_and_checksum) - def test_iterobjects_2(self): + def test_iterobjects_2(self) -> None: with self.get_pack(pack1_sha) as p: objs = {o.id: o for o in p.iterobjects()} self.assertEqual(3, len(objs)) @@ -534,7 +535,7 @@ def test_iterobjects_2(self): self.assertIsInstance(objs[tree_sha], Tree) self.assertIsInstance(objs[commit_sha], Commit) - def test_iterobjects_subset(self): + def test_iterobjects_subset(self) -> None: with self.get_pack(pack1_sha) as p: objs = {o.id: o for o in p.iterobjects_subset([commit_sha])} self.assertEqual(1, len(objs)) @@ -542,7 +543,7 @@ def test_iterobjects_subset(self): class TestThinPack(PackTests): - def setUp(self): + def setUp(self) -> None: super().setUp() self.store = MemoryObjectStore() self.blobs = {} @@ -583,13 +584,13 @@ def make_pack(self, resolve_ext_ref): resolve_ext_ref=self.store.get_raw if resolve_ext_ref else None, ) - def test_get_raw(self): + def test_get_raw(self) -> None: with self.make_pack(False) as p: self.assertRaises(KeyError, p.get_raw, self.blobs[b"foo1234"].id) with self.make_pack(True) as p: self.assertEqual((3, b"foo1234"), p.get_raw(self.blobs[b"foo1234"].id)) - def test_get_unpacked_object(self): + def test_get_unpacked_object(self) -> None: self.maxDiff = None with 
self.make_pack(False) as p: expected = UnpackedObject( @@ -613,7 +614,7 @@ def test_get_unpacked_object(self): got, ) - def test_iterobjects(self): + def test_iterobjects(self) -> None: with self.make_pack(False) as p: self.assertRaises(UnresolvedDeltas, list, p.iterobjects()) with self.make_pack(True) as p: @@ -630,12 +631,12 @@ def test_iterobjects(self): class WritePackTests(TestCase): - def test_write_pack_header(self): + def test_write_pack_header(self) -> None: f = BytesIO() write_pack_header(f.write, 42) self.assertEqual(b"PACK\x00\x00\x00\x02\x00\x00\x00*", f.getvalue()) - def test_write_pack_object(self): + def test_write_pack_object(self) -> None: f = BytesIO() f.write(b"header") offset = f.tell() @@ -651,7 +652,7 @@ def test_write_pack_object(self): self.assertEqual(crc32, unpacked.crc32) self.assertEqual(b"x", unused) - def test_write_pack_object_sha(self): + def test_write_pack_object_sha(self) -> None: f = BytesIO() f.write(b"header") offset = f.tell() @@ -662,7 +663,7 @@ def test_write_pack_object_sha(self): sha_b.update(f.getvalue()[offset:]) self.assertEqual(sha_a.digest(), sha_b.digest()) - def test_write_pack_object_compression_level(self): + def test_write_pack_object_compression_level(self) -> None: f = BytesIO() f.write(b"header") offset = f.tell() @@ -680,21 +681,21 @@ def test_write_pack_object_compression_level(self): class BaseTestPackIndexWriting: - def assertSucceeds(self, func, *args, **kwargs): + def assertSucceeds(self, func, *args, **kwargs) -> None: try: func(*args, **kwargs) except ChecksumMismatch as e: self.fail(e) - def index(self, filename, entries, pack_checksum): + def index(self, filename, entries, pack_checksum) -> NoReturn: raise NotImplementedError(self.index) - def test_empty(self): + def test_empty(self) -> None: idx = self.index("empty.idx", [], pack_checksum) self.assertEqual(idx.get_pack_checksum(), pack_checksum) self.assertEqual(0, len(idx)) - def test_large(self): + def test_large(self) -> None: entry1_sha = hex_to_sha("4e6388232ec39792661e2e75db8fb117fc869ce6") entry2_sha = hex_to_sha("e98f071751bd77f59967bfa671cd2caebdccc9a2") entries = [ @@ -721,7 +722,7 @@ def test_large(self): else: self.assertIsNone(actual_crc) - def test_single(self): + def test_single(self) -> None: entry_sha = hex_to_sha("6f670c0fb53f9463760b7295fbb814e965fb20c8") my_entries = [(entry_sha, 178, 42)] idx = self.index("single.idx", my_entries, pack_checksum) @@ -741,10 +742,10 @@ def test_single(self): class BaseTestFilePackIndexWriting(BaseTestPackIndexWriting): - def setUp(self): + def setUp(self) -> None: self.tempdir = tempfile.mkdtemp() - def tearDown(self): + def tearDown(self) -> None: shutil.rmtree(self.tempdir) def index(self, filename, entries, pack_checksum): @@ -755,14 +756,14 @@ def index(self, filename, entries, pack_checksum): self.assertEqual(idx.version, self._expected_version) return idx - def writeIndex(self, filename, entries, pack_checksum): + def writeIndex(self, filename, entries, pack_checksum) -> None: # FIXME: Write to BytesIO instead rather than hitting disk ? 
with GitFile(filename, "wb") as f: self._write_fn(f, entries, pack_checksum) class TestMemoryIndexWriting(TestCase, BaseTestPackIndexWriting): - def setUp(self): + def setUp(self) -> None: TestCase.setUp(self) self._has_crc32_checksum = True self._supports_large = True @@ -770,12 +771,12 @@ def setUp(self): def index(self, filename, entries, pack_checksum): return MemoryPackIndex(entries, pack_checksum) - def tearDown(self): + def tearDown(self) -> None: TestCase.tearDown(self) class TestPackIndexWritingv1(TestCase, BaseTestFilePackIndexWriting): - def setUp(self): + def setUp(self) -> None: TestCase.setUp(self) BaseTestFilePackIndexWriting.setUp(self) self._has_crc32_checksum = False @@ -783,13 +784,13 @@ def setUp(self): self._supports_large = False self._write_fn = write_pack_index_v1 - def tearDown(self): + def tearDown(self) -> None: TestCase.tearDown(self) BaseTestFilePackIndexWriting.tearDown(self) class TestPackIndexWritingv2(TestCase, BaseTestFilePackIndexWriting): - def setUp(self): + def setUp(self) -> None: TestCase.setUp(self) BaseTestFilePackIndexWriting.setUp(self) self._has_crc32_checksum = True @@ -797,7 +798,7 @@ def setUp(self): self._expected_version = 2 self._write_fn = write_pack_index_v2 - def tearDown(self): + def tearDown(self) -> None: TestCase.tearDown(self) BaseTestFilePackIndexWriting.tearDown(self) @@ -814,14 +815,14 @@ class ReadZlibTests(TestCase): comp = zlib.compress(decomp) extra = b"nextobject" - def setUp(self): + def setUp(self) -> None: super().setUp() self.read = BytesIO(self.comp + self.extra).read self.unpacked = UnpackedObject( Tree.type_num, decomp_len=len(self.decomp), crc32=0 ) - def test_decompress_size(self): + def test_decompress_size(self) -> None: good_decomp_len = len(self.decomp) self.unpacked.decomp_len = -1 self.assertRaises(ValueError, read_zlib_chunks, self.read, self.unpacked) @@ -830,14 +831,14 @@ def test_decompress_size(self): self.unpacked.decomp_len = good_decomp_len + 1 self.assertRaises(zlib.error, read_zlib_chunks, self.read, self.unpacked) - def test_decompress_truncated(self): + def test_decompress_truncated(self) -> None: read = BytesIO(self.comp[:10]).read self.assertRaises(zlib.error, read_zlib_chunks, read, self.unpacked) read = BytesIO(self.comp).read self.assertRaises(zlib.error, read_zlib_chunks, read, self.unpacked) - def test_decompress_empty(self): + def test_decompress_empty(self) -> None: unpacked = UnpackedObject(Tree.type_num, decomp_len=0) comp = zlib.compress(b"") read = BytesIO(comp + self.extra).read @@ -846,12 +847,12 @@ def test_decompress_empty(self): self.assertNotEqual(b"", unused) self.assertEqual(self.extra, unused + read()) - def test_decompress_no_crc32(self): + def test_decompress_no_crc32(self) -> None: self.unpacked.crc32 = None read_zlib_chunks(self.read, self.unpacked) self.assertEqual(None, self.unpacked.crc32) - def _do_decompress_test(self, buffer_size, **kwargs): + def _do_decompress_test(self, buffer_size, **kwargs) -> None: unused = read_zlib_chunks( self.read, self.unpacked, buffer_size=buffer_size, **kwargs ) @@ -860,34 +861,34 @@ def _do_decompress_test(self, buffer_size, **kwargs): self.assertNotEqual(b"", unused) self.assertEqual(self.extra, unused + self.read()) - def test_simple_decompress(self): + def test_simple_decompress(self) -> None: self._do_decompress_test(4096) self.assertEqual(None, self.unpacked.comp_chunks) # These buffer sizes are not intended to be realistic, but rather simulate # larger buffer sizes that may end at various places. 
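# --- Editor's illustration (not part of this patch) ---
# The buffer-size tests that follow exercise read_zlib_chunks() with
# deliberately tiny buffers, so that the end of the zlib stream falls at
# different read boundaries.  A minimal stand-alone sketch of the same call
# pattern already used in ReadZlibTests above; the payload bytes and the
# name `unpacked_example` are arbitrary and chosen only for this example.
import zlib
from io import BytesIO

from dulwich.objects import Tree
from dulwich.pack import UnpackedObject, read_zlib_chunks

payload = b"example object body\n" * 4
compressed = zlib.compress(payload) + b"next-object-bytes"
unpacked_example = UnpackedObject(Tree.type_num, decomp_len=len(payload), crc32=0)
# buffer_size=1 forces the maximum number of partial reads from the stream.
unused = read_zlib_chunks(BytesIO(compressed).read, unpacked_example, buffer_size=1)
assert b"".join(unpacked_example.decomp_chunks) == payload
# `unused` holds whatever bytes were over-read past the end of the zlib stream.
# --- end of editor's illustration ---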
- def test_decompress_buffer_size_1(self): + def test_decompress_buffer_size_1(self) -> None: self._do_decompress_test(1) - def test_decompress_buffer_size_2(self): + def test_decompress_buffer_size_2(self) -> None: self._do_decompress_test(2) - def test_decompress_buffer_size_3(self): + def test_decompress_buffer_size_3(self) -> None: self._do_decompress_test(3) - def test_decompress_buffer_size_4(self): + def test_decompress_buffer_size_4(self) -> None: self._do_decompress_test(4) - def test_decompress_include_comp(self): + def test_decompress_include_comp(self) -> None: self._do_decompress_test(4096, include_comp=True) self.assertEqual(self.comp, b"".join(self.unpacked.comp_chunks)) class DeltifyTests(TestCase): - def test_empty(self): + def test_empty(self) -> None: self.assertEqual([], list(deltify_pack_objects([]))) - def test_single(self): + def test_single(self) -> None: b = Blob.from_string(b"foo") self.assertEqual( [ @@ -901,7 +902,7 @@ def test_single(self): list(deltify_pack_objects([(b, b"")])), ) - def test_simple_delta(self): + def test_simple_delta(self) -> None: b1 = Blob.from_string(b"a" * 101) b2 = Blob.from_string(b"a" * 100) delta = list(create_delta(b1.as_raw_chunks(), b2.as_raw_chunks())) @@ -925,13 +926,13 @@ def test_simple_delta(self): class TestPackStreamReader(TestCase): - def test_read_objects_emtpy(self): + def test_read_objects_emtpy(self) -> None: f = BytesIO() build_pack(f, []) reader = PackStreamReader(f.read) self.assertEqual(0, len(list(reader.read_objects()))) - def test_read_objects(self): + def test_read_objects(self) -> None: f = BytesIO() entries = build_pack( f, @@ -964,7 +965,7 @@ def test_read_objects(self): self.assertEqual(b"".join(delta), b"".join(unpacked_delta.decomp_chunks)) self.assertEqual(entries[1][4], unpacked_delta.crc32) - def test_read_objects_buffered(self): + def test_read_objects_buffered(self) -> None: f = BytesIO() build_pack( f, @@ -976,7 +977,7 @@ def test_read_objects_buffered(self): reader = PackStreamReader(f.read, zlib_bufsize=4) self.assertEqual(2, len(list(reader.read_objects()))) - def test_read_objects_empty(self): + def test_read_objects_empty(self) -> None: reader = PackStreamReader(BytesIO().read) self.assertRaises(AssertionError, list, reader.read_objects()) @@ -1007,7 +1008,7 @@ def _resolve_object(self, offset, pack_type_num, base_chunks): class DeltaChainIteratorTests(TestCase): - def setUp(self): + def setUp(self) -> None: super().setUp() self.store = MemoryObjectStore() self.fetched = set() @@ -1048,11 +1049,11 @@ def make_pack_iter_subset(self, f, subset, thin=None): pack, subset, resolve_ext_ref=resolve_ext_ref ) - def assertEntriesMatch(self, expected_indexes, entries, pack_iter): + def assertEntriesMatch(self, expected_indexes, entries, pack_iter) -> None: expected = [entries[i] for i in expected_indexes] self.assertEqual(expected, list(pack_iter._walk_all_chains())) - def test_no_deltas(self): + def test_no_deltas(self) -> None: f = BytesIO() entries = build_pack( f, @@ -1080,7 +1081,7 @@ def test_no_deltas(self): ), ) - def test_ofs_deltas(self): + def test_ofs_deltas(self) -> None: f = BytesIO() entries = build_pack( f, @@ -1099,7 +1100,7 @@ def test_ofs_deltas(self): self.make_pack_iter_subset(f, [entries[1][3], entries[2][3]]), ) - def test_ofs_deltas_chain(self): + def test_ofs_deltas_chain(self) -> None: f = BytesIO() entries = build_pack( f, @@ -1111,7 +1112,7 @@ def test_ofs_deltas_chain(self): ) self.assertEntriesMatch([0, 1, 2], entries, self.make_pack_iter(f)) - def test_ref_deltas(self): + 
def test_ref_deltas(self) -> None: f = BytesIO() entries = build_pack( f, @@ -1124,7 +1125,7 @@ def test_ref_deltas(self): # Delta resolution changed to DFS self.assertEntriesMatch([1, 2, 0], entries, self.make_pack_iter(f)) - def test_ref_deltas_chain(self): + def test_ref_deltas_chain(self) -> None: f = BytesIO() entries = build_pack( f, @@ -1136,7 +1137,7 @@ def test_ref_deltas_chain(self): ) self.assertEntriesMatch([1, 2, 0], entries, self.make_pack_iter(f)) - def test_ofs_and_ref_deltas(self): + def test_ofs_and_ref_deltas(self) -> None: # Deltas pending on this offset are popped before deltas depending on # this ref. f = BytesIO() @@ -1152,7 +1153,7 @@ def test_ofs_and_ref_deltas(self): # Delta resolution changed to DFS self.assertEntriesMatch([1, 0, 2], entries, self.make_pack_iter(f)) - def test_mixed_chain(self): + def test_mixed_chain(self) -> None: f = BytesIO() entries = build_pack( f, @@ -1167,7 +1168,7 @@ def test_mixed_chain(self): # Delta resolution changed to DFS self.assertEntriesMatch([0, 4, 2, 1, 3], entries, self.make_pack_iter(f)) - def test_long_chain(self): + def test_long_chain(self) -> None: n = 100 objects_spec = [(Blob.type_num, b"blob")] for i in range(n): @@ -1176,7 +1177,7 @@ def test_long_chain(self): entries = build_pack(f, objects_spec) self.assertEntriesMatch(range(n + 1), entries, self.make_pack_iter(f)) - def test_branchy_chain(self): + def test_branchy_chain(self) -> None: n = 100 objects_spec = [(Blob.type_num, b"blob")] for i in range(n): @@ -1187,7 +1188,7 @@ def test_branchy_chain(self): indices = [0, *list(range(100, 0, -1))] self.assertEntriesMatch(indices, entries, self.make_pack_iter(f)) - def test_ext_ref(self): + def test_ext_ref(self) -> None: (blob,) = self.store_blobs([b"blob"]) f = BytesIO() entries = build_pack(f, [(REF_DELTA, (blob.id, b"blob1"))], store=self.store) @@ -1195,7 +1196,7 @@ def test_ext_ref(self): self.assertEntriesMatch([0], entries, pack_iter) self.assertEqual([hex_to_sha(blob.id)], pack_iter.ext_refs()) - def test_ext_ref_chain(self): + def test_ext_ref_chain(self) -> None: (blob,) = self.store_blobs([b"blob"]) f = BytesIO() entries = build_pack( @@ -1210,7 +1211,7 @@ def test_ext_ref_chain(self): self.assertEntriesMatch([1, 0], entries, pack_iter) self.assertEqual([hex_to_sha(blob.id)], pack_iter.ext_refs()) - def test_ext_ref_chain_degenerate(self): + def test_ext_ref_chain_degenerate(self) -> None: # Test a degenerate case where the sender is sending a REF_DELTA # object that expands to an object already in the repository. 
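# --- Editor's illustration (not part of this patch) ---
# Several of the expected index orders above (e.g. [1, 2, 0] and
# [0, 4, 2, 1, 3], marked "Delta resolution changed to DFS") encode the fact
# that delta chains are now resolved depth-first: a base object is yielded,
# and every delta hanging off it is fully expanded before the next base is
# visited.  A generic depth-first walk over a base -> deltas map;
# walk_chains_dfs and its arguments are hypothetical names for this sketch,
# not dulwich API.
def walk_chains_dfs(children, roots):
    order = []
    stack = list(reversed(roots))
    while stack:
        node = stack.pop()
        order.append(node)
        # Push children in reverse so the first delta is expanded first.
        stack.extend(reversed(children.get(node, [])))
    return order

# For a pack whose entry 1 is a plain blob with entries 2 and 0 delta-ed
# onto it, the depth-first order matches the [1, 2, 0] expectation above.
assert walk_chains_dfs({1: [2, 0]}, [1]) == [1, 2, 0]
# --- end of editor's illustration; test_ext_ref_chain_degenerate continues below ---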
(blob,) = self.store_blobs([b"blob"]) @@ -1230,7 +1231,7 @@ def test_ext_ref_chain_degenerate(self): self.assertEntriesMatch([0, 1], entries, pack_iter) self.assertEqual([hex_to_sha(blob.id)], pack_iter.ext_refs()) - def test_ext_ref_multiple_times(self): + def test_ext_ref_multiple_times(self) -> None: (blob,) = self.store_blobs([b"blob"]) f = BytesIO() entries = build_pack( @@ -1245,7 +1246,7 @@ def test_ext_ref_multiple_times(self): self.assertEntriesMatch([0, 1], entries, pack_iter) self.assertEqual([hex_to_sha(blob.id)], pack_iter.ext_refs()) - def test_multiple_ext_refs(self): + def test_multiple_ext_refs(self) -> None: b1, b2 = self.store_blobs([b"foo", b"bar"]) f = BytesIO() entries = build_pack( @@ -1260,7 +1261,7 @@ def test_multiple_ext_refs(self): self.assertEntriesMatch([0, 1], entries, pack_iter) self.assertEqual([hex_to_sha(b1.id), hex_to_sha(b2.id)], pack_iter.ext_refs()) - def test_bad_ext_ref_non_thin_pack(self): + def test_bad_ext_ref_non_thin_pack(self) -> None: (blob,) = self.store_blobs([b"blob"]) f = BytesIO() build_pack(f, [(REF_DELTA, (blob.id, b"blob1"))], store=self.store) @@ -1271,7 +1272,7 @@ def test_bad_ext_ref_non_thin_pack(self): except UnresolvedDeltas as e: self.assertEqual([blob.id], e.shas) - def test_bad_ext_ref_thin_pack(self): + def test_bad_ext_ref_thin_pack(self) -> None: b1, b2, b3 = self.store_blobs([b"foo", b"bar", b"baz"]) f = BytesIO() build_pack( @@ -1293,7 +1294,7 @@ def test_bad_ext_ref_thin_pack(self): except UnresolvedDeltas as e: self.assertEqual((sorted([b2.id, b3.id]),), (sorted(e.shas),)) - def test_ext_ref_deltified_object_based_on_itself(self): + def test_ext_ref_deltified_object_based_on_itself(self) -> None: b1_content = b"foo" (b1,) = self.store_blobs([b1_content]) f = BytesIO() @@ -1323,7 +1324,7 @@ def test_ext_ref_deltified_object_based_on_itself(self): class DeltaEncodeSizeTests(TestCase): - def test_basic(self): + def test_basic(self) -> None: self.assertEqual(b"\x00", _delta_encode_size(0)) self.assertEqual(b"\x01", _delta_encode_size(1)) self.assertEqual(b"\xfa\x01", _delta_encode_size(250)) @@ -1332,7 +1333,7 @@ def test_basic(self): class EncodeCopyOperationTests(TestCase): - def test_basic(self): + def test_basic(self) -> None: self.assertEqual(b"\x80", _encode_copy_operation(0, 0)) self.assertEqual(b"\x91\x01\x0a", _encode_copy_operation(1, 10)) self.assertEqual(b"\xb1\x64\xe8\x03", _encode_copy_operation(100, 1000)) diff --git a/tests/test_patch.py b/tests/test_patch.py index 854cb0e23..65db9505e 100644 --- a/tests/test_patch.py +++ b/tests/test_patch.py @@ -21,6 +21,7 @@ """Tests for patch.py.""" from io import BytesIO, StringIO +from typing import NoReturn from dulwich.object_store import MemoryObjectStore from dulwich.objects import S_IFGITLINK, Blob, Commit, Tree @@ -37,7 +38,7 @@ class WriteCommitPatchTests(TestCase): - def test_simple_bytesio(self): + def test_simple_bytesio(self) -> None: f = BytesIO() c = Commit() c.committer = c.author = b"Jelmer " @@ -70,7 +71,7 @@ def test_simple_bytesio(self): class ReadGitAmPatch(TestCase): - def test_extract_string(self): + def test_extract_string(self) -> None: text = b"""\ From ff643aae102d8870cac88e8f007e70f58f3a7363 Mon Sep 17 00:00:00 2001 From: Jelmer Vernooij @@ -102,7 +103,7 @@ def test_extract_string(self): ) self.assertEqual(b"1.7.0.4", version) - def test_extract_bytes(self): + def test_extract_bytes(self) -> None: text = b"""\ From ff643aae102d8870cac88e8f007e70f58f3a7363 Mon Sep 17 00:00:00 2001 From: Jelmer Vernooij @@ -134,7 +135,7 @@ def 
test_extract_bytes(self): ) self.assertEqual(b"1.7.0.4", version) - def test_extract_spaces(self): + def test_extract_spaces(self) -> None: text = b"""From ff643aae102d8870cac88e8f007e70f58f3a7363 Mon Sep 17 00:00:00 2001 From: Jelmer Vernooij Date: Thu, 15 Apr 2010 15:40:28 +0200 @@ -164,7 +165,7 @@ def test_extract_spaces(self): c.message, ) - def test_extract_pseudo_from_header(self): + def test_extract_pseudo_from_header(self) -> None: text = b"""From ff643aae102d8870cac88e8f007e70f58f3a7363 Mon Sep 17 00:00:00 2001 From: Jelmer Vernooij Date: Thu, 15 Apr 2010 15:40:28 +0200 @@ -197,7 +198,7 @@ def test_extract_pseudo_from_header(self): c.message, ) - def test_extract_no_version_tail(self): + def test_extract_no_version_tail(self) -> None: text = b"""\ From ff643aae102d8870cac88e8f007e70f58f3a7363 Mon Sep 17 00:00:00 2001 From: Jelmer Vernooij @@ -216,7 +217,7 @@ def test_extract_no_version_tail(self): c, diff, version = git_am_patch_split(BytesIO(text), "utf-8") self.assertEqual(None, version) - def test_extract_mercurial(self): + def test_extract_mercurial(self) -> NoReturn: raise SkipTest( "git_am_patch_split doesn't handle Mercurial patches " "properly yet" ) @@ -262,7 +263,7 @@ class DiffTests(TestCase): class DiffTests(TestCase): """Tests for write_blob_diff and write_tree_diff.""" - def test_blob_diff(self): + def test_blob_diff(self) -> None: f = BytesIO() write_blob_diff( f, @@ -283,7 +284,7 @@ def test_blob_diff(self): f.getvalue().splitlines(), ) - def test_blob_add(self): + def test_blob_add(self) -> None: f = BytesIO() write_blob_diff( f, @@ -304,7 +305,7 @@ def test_blob_add(self): f.getvalue().splitlines(), ) - def test_blob_remove(self): + def test_blob_remove(self) -> None: f = BytesIO() write_blob_diff( f, @@ -325,7 +326,7 @@ def test_blob_remove(self): f.getvalue().splitlines(), ) - def test_tree_diff(self): + def test_tree_diff(self) -> None: f = BytesIO() store = MemoryObjectStore() added = Blob.from_string(b"add\n") @@ -384,7 +385,7 @@ def test_tree_diff(self): f.getvalue().splitlines(), ) - def test_tree_diff_submodule(self): + def test_tree_diff_submodule(self) -> None: f = BytesIO() store = MemoryObjectStore() tree1 = Tree() @@ -414,7 +415,7 @@ def test_tree_diff_submodule(self): f.getvalue().splitlines(), ) - def test_object_diff_blob(self): + def test_object_diff_blob(self) -> None: f = BytesIO() b1 = Blob.from_string(b"old\nsame\n") b2 = Blob.from_string(b"new\nsame\n") @@ -437,7 +438,7 @@ def test_object_diff_blob(self): f.getvalue().splitlines(), ) - def test_object_diff_add_blob(self): + def test_object_diff_add_blob(self) -> None: f = BytesIO() store = MemoryObjectStore() b2 = Blob.from_string(b"new\nsame\n") @@ -457,7 +458,7 @@ def test_object_diff_add_blob(self): f.getvalue().splitlines(), ) - def test_object_diff_remove_blob(self): + def test_object_diff_remove_blob(self) -> None: f = BytesIO() b1 = Blob.from_string(b"new\nsame\n") store = MemoryObjectStore() @@ -477,7 +478,7 @@ def test_object_diff_remove_blob(self): f.getvalue().splitlines(), ) - def test_object_diff_bin_blob_force(self): + def test_object_diff_bin_blob_force(self) -> None: f = BytesIO() # Prepare two slightly different PNG headers b1 = Blob.from_string( @@ -521,7 +522,7 @@ def test_object_diff_bin_blob_force(self): f.getvalue().splitlines(), ) - def test_object_diff_bin_blob(self): + def test_object_diff_bin_blob(self) -> None: f = BytesIO() # Prepare two slightly different PNG headers b1 = Blob.from_string( @@ -550,7 +551,7 @@ def test_object_diff_bin_blob(self): 
f.getvalue().splitlines(), ) - def test_object_diff_add_bin_blob(self): + def test_object_diff_add_bin_blob(self) -> None: f = BytesIO() b2 = Blob.from_string( b"\x89\x50\x4e\x47\x0d\x0a\x1a\x0a" @@ -571,7 +572,7 @@ def test_object_diff_add_bin_blob(self): f.getvalue().splitlines(), ) - def test_object_diff_remove_bin_blob(self): + def test_object_diff_remove_bin_blob(self) -> None: f = BytesIO() b1 = Blob.from_string( b"\x89\x50\x4e\x47\x0d\x0a\x1a\x0a" @@ -592,7 +593,7 @@ def test_object_diff_remove_bin_blob(self): f.getvalue().splitlines(), ) - def test_object_diff_kind_change(self): + def test_object_diff_kind_change(self) -> None: f = BytesIO() b1 = Blob.from_string(b"new\nsame\n") store = MemoryObjectStore() @@ -625,7 +626,7 @@ def test_object_diff_kind_change(self): class GetSummaryTests(TestCase): - def test_simple(self): + def test_simple(self) -> None: c = Commit() c.committer = c.author = b"Jelmer " c.commit_time = c.author_time = 1271350201 diff --git a/tests/test_porcelain.py b/tests/test_porcelain.py index 8bbd6dd52..5b7db0c8d 100644 --- a/tests/test_porcelain.py +++ b/tests/test_porcelain.py @@ -66,7 +66,7 @@ def flat_walk_dir(dir_to_walk): class PorcelainTestCase(TestCase): - def setUp(self): + def setUp(self) -> None: super().setUp() self.test_dir = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, self.test_dir) @@ -74,7 +74,7 @@ def setUp(self): self.repo = Repo.init(self.repo_path, mkdir=True) self.addCleanup(self.repo.close) - def assertRecentTimestamp(self, ts): + def assertRecentTimestamp(self, ts) -> None: # On some slow CIs it does actually take more than 5 seconds to go from # creating the tag to here. self.assertLess(time.time() - ts, 50) @@ -254,7 +254,7 @@ class PorcelainGpgTestCase(PorcelainTestCase): NON_DEFAULT_KEY_ID = "6A93393F50C5E6ACD3D6FB45B936212EDB4E14C0" - def setUp(self): + def setUp(self) -> None: super().setUp() self.gpg_dir = os.path.join(self.test_dir, "gpg") os.mkdir(self.gpg_dir, mode=0o700) @@ -264,7 +264,7 @@ def setUp(self): self.addCleanup(shutil.rmtree, self.gpg_dir, ignore_errors=True) self.overrideEnv("GNUPGHOME", self.gpg_dir) - def import_default_key(self): + def import_default_key(self) -> None: subprocess.run( ["gpg", "--import"], stdout=subprocess.DEVNULL, @@ -273,7 +273,7 @@ def import_default_key(self): text=True, ) - def import_non_default_key(self): + def import_non_default_key(self) -> None: subprocess.run( ["gpg", "--import"], stdout=subprocess.DEVNULL, @@ -286,7 +286,7 @@ def import_non_default_key(self): class ArchiveTests(PorcelainTestCase): """Tests for the archive command.""" - def test_simple(self): + def test_simple(self) -> None: c1, c2, c3 = build_commit_graph( self.repo.object_store, [[1], [2, 1], [3, 1, 2]] ) @@ -303,7 +303,7 @@ def test_simple(self): class UpdateServerInfoTests(PorcelainTestCase): - def test_simple(self): + def test_simple(self) -> None: c1, c2, c3 = build_commit_graph( self.repo.object_store, [[1], [2, 1], [3, 1, 2]] ) @@ -315,7 +315,7 @@ def test_simple(self): class CommitTests(PorcelainTestCase): - def test_custom_author(self): + def test_custom_author(self) -> None: c1, c2, c3 = build_commit_graph( self.repo.object_store, [[1], [2, 1], [3, 1, 2]] ) @@ -329,7 +329,7 @@ def test_custom_author(self): self.assertIsInstance(sha, bytes) self.assertEqual(len(sha), 40) - def test_unicode(self): + def test_unicode(self) -> None: c1, c2, c3 = build_commit_graph( self.repo.object_store, [[1], [2, 1], [3, 1, 2]] ) @@ -343,7 +343,7 @@ def test_unicode(self): self.assertIsInstance(sha, bytes) 
self.assertEqual(len(sha), 40) - def test_no_verify(self): + def test_no_verify(self) -> None: if os.name != "posix": self.skipTest("shell hook tests requires POSIX shell") self.assertTrue(os.path.exists("/bin/sh")) @@ -397,7 +397,7 @@ def test_no_verify(self): self.assertIsInstance(sha, bytes) self.assertEqual(len(sha), 40) - def test_timezone(self): + def test_timezone(self) -> None: c1, c2, c3 = build_commit_graph( self.repo.object_store, [[1], [2, 1], [3, 1, 2]] ) @@ -457,7 +457,7 @@ def test_timezone(self): "gpgme not easily available or supported on Windows and PyPy", ) class CommitSignTests(PorcelainGpgTestCase): - def test_default_key(self): + def test_default_key(self) -> None: c1, c2, c3 = build_commit_graph( self.repo.object_store, [[1], [2, 1], [3, 1, 2]] ) @@ -494,7 +494,7 @@ def test_default_key(self): commit.verify, ) - def test_non_default_key(self): + def test_non_default_key(self) -> None: c1, c2, c3 = build_commit_graph( self.repo.object_store, [[1], [2, 1], [3, 1, 2]] ) @@ -519,26 +519,26 @@ def test_non_default_key(self): class TimezoneTests(PorcelainTestCase): - def put_envs(self, value): + def put_envs(self, value) -> None: self.overrideEnv("GIT_AUTHOR_DATE", value) self.overrideEnv("GIT_COMMITTER_DATE", value) - def fallback(self, value): + def fallback(self, value) -> None: self.put_envs(value) self.assertRaises(porcelain.TimezoneFormatError, porcelain.get_user_timezones) - def test_internal_format(self): + def test_internal_format(self) -> None: self.put_envs("0 +0500") self.assertTupleEqual((18000, 18000), porcelain.get_user_timezones()) - def test_rfc_2822(self): + def test_rfc_2822(self) -> None: self.put_envs("Mon, 20 Nov 1995 19:12:08 -0500") self.assertTupleEqual((-18000, -18000), porcelain.get_user_timezones()) self.put_envs("Mon, 20 Nov 1995 19:12:08") self.assertTupleEqual((0, 0), porcelain.get_user_timezones()) - def test_iso8601(self): + def test_iso8601(self) -> None: self.put_envs("1995-11-20T19:12:08-0501") self.assertTupleEqual((-18060, -18060), porcelain.get_user_timezones()) @@ -555,7 +555,7 @@ def test_iso8601(self): self.put_envs("2006-07-03 17:18:44 +0200") self.assertTupleEqual((7200, 7200), porcelain.get_user_timezones()) - def test_missing_or_malformed(self): + def test_missing_or_malformed(self) -> None: # TODO: add more here self.fallback("0 + 0500") self.fallback("a +0500") @@ -567,12 +567,12 @@ def test_missing_or_malformed(self): self.fallback("11/20/1995") self.fallback("20.11.1995") - def test_different_envs(self): + def test_different_envs(self) -> None: self.overrideEnv("GIT_AUTHOR_DATE", "0 +0500") self.overrideEnv("GIT_COMMITTER_DATE", "0 +0501") self.assertTupleEqual((18000, 18060), porcelain.get_user_timezones()) - def test_no_envs(self): + def test_no_envs(self) -> None: local_timezone = time.localtime().tm_gmtoff self.put_envs("0 +0500") @@ -594,7 +594,7 @@ def test_no_envs(self): class CleanTests(PorcelainTestCase): - def put_files(self, tracked, ignored, untracked, empty_dirs): + def put_files(self, tracked, ignored, untracked, empty_dirs) -> None: """Put the described files in the wd.""" all_files = tracked | ignored | untracked for file_path in all_files: @@ -619,7 +619,7 @@ def put_files(self, tracked, ignored, untracked, empty_dirs): porcelain.add(repo=self.repo.path, paths=files_to_add) porcelain.commit(repo=self.repo.path, message="init commit") - def assert_wd(self, expected_paths): + def assert_wd(self, expected_paths) -> None: """Assert paths of files and dirs in wd are same as expected_paths.""" control_dir_rel = 
os.path.relpath(self.repo._controldir, self.repo.path) @@ -632,7 +632,7 @@ def assert_wd(self, expected_paths): norm_expected_paths = {os.path.normpath(p) for p in expected_paths} self.assertEqual(found_paths, norm_expected_paths) - def test_from_root(self): + def test_from_root(self) -> None: self.put_files( tracked={"tracked_file", "tracked_dir/tracked_file", ".gitignore"}, ignored={"ignored_file"}, @@ -656,7 +656,7 @@ def test_from_root(self): } ) - def test_from_subdir(self): + def test_from_subdir(self) -> None: self.put_files( tracked={"tracked_file", "tracked_dir/tracked_file", ".gitignore"}, ignored={"ignored_file"}, @@ -690,7 +690,7 @@ def test_from_subdir(self): class CloneTests(PorcelainTestCase): - def test_simple_local(self): + def test_simple_local(self) -> None: f1_1 = make_object(Blob, data=b"f1") commit_spec = [[1], [2, 1], [3, 1, 2]] trees = { @@ -725,7 +725,7 @@ def test_simple_local(self): c.get((b"remote", b"origin"), b"fetch"), ) - def test_simple_local_with_checkout(self): + def test_simple_local_with_checkout(self) -> None: f1_1 = make_object(Blob, data=b"f1") commit_spec = [[1], [2, 1], [3, 1, 2]] trees = { @@ -748,7 +748,7 @@ def test_simple_local_with_checkout(self): self.assertIn("f1", os.listdir(target_path)) self.assertIn("f2", os.listdir(target_path)) - def test_bare_local_with_checkout(self): + def test_bare_local_with_checkout(self) -> None: f1_1 = make_object(Blob, data=b"f1") commit_spec = [[1], [2, 1], [3, 1, 2]] trees = { @@ -772,7 +772,7 @@ def test_bare_local_with_checkout(self): self.assertNotIn(b"f1", os.listdir(target_path)) self.assertNotIn(b"f2", os.listdir(target_path)) - def test_no_checkout_with_bare(self): + def test_no_checkout_with_bare(self) -> None: f1_1 = make_object(Blob, data=b"f1") commit_spec = [[1]] trees = {1: [(b"f1", f1_1), (b"f2", f1_1)]} @@ -793,7 +793,7 @@ def test_no_checkout_with_bare(self): errstream=errstream, ) - def test_no_head_no_checkout(self): + def test_no_head_no_checkout(self) -> None: f1_1 = make_object(Blob, data=b"f1") commit_spec = [[1]] trees = {1: [(b"f1", f1_1), (b"f2", f1_1)]} @@ -808,7 +808,7 @@ def test_no_head_no_checkout(self): ) r.close() - def test_no_head_no_checkout_outstream_errstream_autofallback(self): + def test_no_head_no_checkout_outstream_errstream_autofallback(self) -> None: f1_1 = make_object(Blob, data=b"f1") commit_spec = [[1]] trees = {1: [(b"f1", f1_1), (b"f2", f1_1)]} @@ -823,7 +823,7 @@ def test_no_head_no_checkout_outstream_errstream_autofallback(self): ) r.close() - def test_source_broken(self): + def test_source_broken(self) -> None: with tempfile.TemporaryDirectory() as parent: target_path = os.path.join(parent, "target") self.assertRaises( @@ -831,7 +831,7 @@ def test_source_broken(self): ) self.assertFalse(os.path.exists(target_path)) - def test_fetch_symref(self): + def test_fetch_symref(self) -> None: f1_1 = make_object(Blob, data=b"f1") trees = {1: [(b"f1", f1_1), (b"f2", f1_1)]} [c1] = build_commit_graph(self.repo.object_store, [[1]], trees) @@ -857,7 +857,7 @@ def test_fetch_symref(self): target_repo.refs.get_symrefs(), ) - def test_detached_head(self): + def test_detached_head(self) -> None: f1_1 = make_object(Blob, data=b"f1") commit_spec = [[1], [2, 1], [3, 1, 2]] trees = { @@ -880,19 +880,19 @@ def test_detached_head(self): class InitTests(TestCase): - def test_non_bare(self): + def test_non_bare(self) -> None: repo_dir = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, repo_dir) porcelain.init(repo_dir) - def test_bare(self): + def test_bare(self) -> None: repo_dir = 
tempfile.mkdtemp() self.addCleanup(shutil.rmtree, repo_dir) porcelain.init(repo_dir, bare=True) class AddTests(PorcelainTestCase): - def test_add_default_paths(self): + def test_add_default_paths(self) -> None: # create a file for initial commit fullpath = os.path.join(self.repo.path, "blah") with open(fullpath, "w") as f: @@ -926,7 +926,7 @@ def test_add_default_paths(self): index = self.repo.open_index() self.assertEqual(sorted(index), [b"adir/afile", b"blah", b"foo"]) - def test_add_default_paths_subdir(self): + def test_add_default_paths_subdir(self) -> None: os.mkdir(os.path.join(self.repo.path, "foo")) with open(os.path.join(self.repo.path, "blah"), "w") as f: f.write("\n") @@ -949,14 +949,14 @@ def test_add_default_paths_subdir(self): index = self.repo.open_index() self.assertEqual(sorted(index), [b"foo/blie"]) - def test_add_file(self): + def test_add_file(self) -> None: fullpath = os.path.join(self.repo.path, "foo") with open(fullpath, "w") as f: f.write("BAR") porcelain.add(self.repo.path, paths=[fullpath]) self.assertIn(b"foo", self.repo.open_index()) - def test_add_ignored(self): + def test_add_ignored(self) -> None: with open(os.path.join(self.repo.path, ".gitignore"), "w") as f: f.write("foo\nsubdir/") with open(os.path.join(self.repo.path, "foo"), "w") as f: @@ -978,14 +978,14 @@ def test_add_ignored(self): self.assertEqual({"bar"}, set(added)) self.assertEqual({"foo", os.path.join("subdir", "")}, ignored) - def test_add_file_absolute_path(self): + def test_add_file_absolute_path(self) -> None: # Absolute paths are (not yet) supported with open(os.path.join(self.repo.path, "foo"), "w") as f: f.write("BAR") porcelain.add(self.repo, paths=[os.path.join(self.repo.path, "foo")]) self.assertIn(b"foo", self.repo.open_index()) - def test_add_not_in_repo(self): + def test_add_not_in_repo(self) -> None: with open(os.path.join(self.test_dir, "foo"), "w") as f: f.write("BAR") self.assertRaises( @@ -1002,7 +1002,7 @@ def test_add_not_in_repo(self): ) self.assertEqual([], list(self.repo.open_index())) - def test_add_file_clrf_conversion(self): + def test_add_file_clrf_conversion(self) -> None: # Set the right configuration to the repo c = self.repo.get_config() c.set("core", "autocrlf", "input") @@ -1024,7 +1024,7 @@ def test_add_file_clrf_conversion(self): class RemoveTests(PorcelainTestCase): - def test_remove_file(self): + def test_remove_file(self) -> None: fullpath = os.path.join(self.repo.path, "foo") with open(fullpath, "w") as f: f.write("BAR") @@ -1044,7 +1044,7 @@ def test_remove_file(self): os.chdir(cwd) self.assertFalse(os.path.exists(os.path.join(self.repo.path, "foo"))) - def test_remove_file_staged(self): + def test_remove_file_staged(self) -> None: fullpath = os.path.join(self.repo.path, "foo") with open(fullpath, "w") as f: f.write("BAR") @@ -1056,7 +1056,7 @@ def test_remove_file_staged(self): finally: os.chdir(cwd) - def test_remove_file_removed_on_disk(self): + def test_remove_file_removed_on_disk(self) -> None: fullpath = os.path.join(self.repo.path, "foo") with open(fullpath, "w") as f: f.write("BAR") @@ -1072,7 +1072,7 @@ def test_remove_file_removed_on_disk(self): class LogTests(PorcelainTestCase): - def test_simple(self): + def test_simple(self) -> None: c1, c2, c3 = build_commit_graph( self.repo.object_store, [[1], [2, 1], [3, 1, 2]] ) @@ -1081,7 +1081,7 @@ def test_simple(self): porcelain.log(self.repo.path, outstream=outstream) self.assertEqual(3, outstream.getvalue().count("-" * 50)) - def test_max_entries(self): + def test_max_entries(self) -> None: c1, c2, 
c3 = build_commit_graph( self.repo.object_store, [[1], [2, 1], [3, 1, 2]] ) @@ -1092,7 +1092,7 @@ def test_max_entries(self): class ShowTests(PorcelainTestCase): - def test_nolist(self): + def test_nolist(self) -> None: c1, c2, c3 = build_commit_graph( self.repo.object_store, [[1], [2, 1], [3, 1, 2]] ) @@ -1101,7 +1101,7 @@ def test_nolist(self): porcelain.show(self.repo.path, objects=c3.id, outstream=outstream) self.assertTrue(outstream.getvalue().startswith("-" * 50)) - def test_simple(self): + def test_simple(self) -> None: c1, c2, c3 = build_commit_graph( self.repo.object_store, [[1], [2, 1], [3, 1, 2]] ) @@ -1110,14 +1110,14 @@ def test_simple(self): porcelain.show(self.repo.path, objects=[c3.id], outstream=outstream) self.assertTrue(outstream.getvalue().startswith("-" * 50)) - def test_blob(self): + def test_blob(self) -> None: b = Blob.from_string(b"The Foo\n") self.repo.object_store.add_object(b) outstream = StringIO() porcelain.show(self.repo.path, objects=[b.id], outstream=outstream) self.assertEqual(outstream.getvalue(), "The Foo\n") - def test_commit_no_parent(self): + def test_commit_no_parent(self) -> None: a = Blob.from_string(b"The Foo\n") ta = Tree() ta.add(b"somename", 0o100644, a.id) @@ -1146,7 +1146,7 @@ def test_commit_no_parent(self): """, ) - def test_tag(self): + def test_tag(self) -> None: a = Blob.from_string(b"The Foo\n") ta = Tree() ta.add(b"somename", 0o100644, a.id) @@ -1191,7 +1191,7 @@ def test_tag(self): """, ) - def test_commit_with_change(self): + def test_commit_with_change(self) -> None: a = Blob.from_string(b"The Foo\n") ta = Tree() ta.add(b"somename", 0o100644, a.id) @@ -1235,7 +1235,7 @@ def test_commit_with_change(self): class SymbolicRefTests(PorcelainTestCase): - def test_set_wrong_symbolic_ref(self): + def test_set_wrong_symbolic_ref(self) -> None: c1, c2, c3 = build_commit_graph( self.repo.object_store, [[1], [2, 1], [3, 1, 2]] ) @@ -1245,7 +1245,7 @@ def test_set_wrong_symbolic_ref(self): porcelain.Error, porcelain.symbolic_ref, self.repo.path, b"foobar" ) - def test_set_force_wrong_symbolic_ref(self): + def test_set_force_wrong_symbolic_ref(self) -> None: c1, c2, c3 = build_commit_graph( self.repo.object_store, [[1], [2, 1], [3, 1, 2]] ) @@ -1258,7 +1258,7 @@ def test_set_force_wrong_symbolic_ref(self): new_ref = f.read() self.assertEqual(new_ref, b"ref: refs/heads/force_foobar\n") - def test_set_symbolic_ref(self): + def test_set_symbolic_ref(self) -> None: c1, c2, c3 = build_commit_graph( self.repo.object_store, [[1], [2, 1], [3, 1, 2]] ) @@ -1266,7 +1266,7 @@ def test_set_symbolic_ref(self): porcelain.symbolic_ref(self.repo.path, b"master") - def test_set_symbolic_ref_other_than_master(self): + def test_set_symbolic_ref_other_than_master(self) -> None: c1, c2, c3 = build_commit_graph( self.repo.object_store, [[1], [2, 1], [3, 1, 2]], @@ -1284,7 +1284,7 @@ def test_set_symbolic_ref_other_than_master(self): class DiffTreeTests(PorcelainTestCase): - def test_empty(self): + def test_empty(self) -> None: c1, c2, c3 = build_commit_graph( self.repo.object_store, [[1], [2, 1], [3, 1, 2]] ) @@ -1295,7 +1295,7 @@ def test_empty(self): class CommitTreeTests(PorcelainTestCase): - def test_simple(self): + def test_simple(self) -> None: c1, c2, c3 = build_commit_graph( self.repo.object_store, [[1], [2, 1], [3, 1, 2]] ) @@ -1317,7 +1317,7 @@ def test_simple(self): class RevListTests(PorcelainTestCase): - def test_simple(self): + def test_simple(self) -> None: c1, c2, c3 = build_commit_graph( self.repo.object_store, [[1], [2, 1], [3, 1, 2]] ) @@ -1333,7 
+1333,7 @@ def test_simple(self): "gpgme not easily available or supported on Windows and PyPy", ) class TagCreateSignTests(PorcelainGpgTestCase): - def test_default_key(self): + def test_default_key(self) -> None: c1, c2, c3 = build_commit_graph( self.repo.object_store, [[1], [2, 1], [3, 1, 2]] ) @@ -1376,7 +1376,7 @@ def test_default_key(self): tag.verify, ) - def test_non_default_key(self): + def test_non_default_key(self) -> None: c1, c2, c3 = build_commit_graph( self.repo.object_store, [[1], [2, 1], [3, 1, 2]] ) @@ -1407,7 +1407,7 @@ def test_non_default_key(self): class TagCreateTests(PorcelainTestCase): - def test_annotated(self): + def test_annotated(self) -> None: c1, c2, c3 = build_commit_graph( self.repo.object_store, [[1], [2, 1], [3, 1, 2]] ) @@ -1429,7 +1429,7 @@ def test_annotated(self): self.assertEqual(b"bar\n", tag.message) self.assertRecentTimestamp(tag.tag_time) - def test_unannotated(self): + def test_unannotated(self) -> None: c1, c2, c3 = build_commit_graph( self.repo.object_store, [[1], [2, 1], [3, 1, 2]] ) @@ -1442,7 +1442,7 @@ def test_unannotated(self): self.repo[b"refs/tags/tryme"] self.assertEqual(list(tags.values()), [self.repo.head()]) - def test_unannotated_unicode(self): + def test_unannotated_unicode(self) -> None: c1, c2, c3 = build_commit_graph( self.repo.object_store, [[1], [2, 1], [3, 1, 2]] ) @@ -1457,11 +1457,11 @@ def test_unannotated_unicode(self): class TagListTests(PorcelainTestCase): - def test_empty(self): + def test_empty(self) -> None: tags = porcelain.tag_list(self.repo.path) self.assertEqual([], tags) - def test_simple(self): + def test_simple(self) -> None: self.repo.refs[b"refs/tags/foo"] = b"aa" * 20 self.repo.refs[b"refs/tags/bar/bla"] = b"bb" * 20 tags = porcelain.tag_list(self.repo.path) @@ -1470,7 +1470,7 @@ def test_simple(self): class TagDeleteTests(PorcelainTestCase): - def test_simple(self): + def test_simple(self) -> None: [c1] = build_commit_graph(self.repo.object_store, [[1]]) self.repo[b"HEAD"] = c1.id porcelain.tag_create(self.repo, b"foo") @@ -1480,7 +1480,7 @@ def test_simple(self): class ResetTests(PorcelainTestCase): - def test_hard_head(self): + def test_hard_head(self) -> None: fullpath = os.path.join(self.repo.path, "foo") with open(fullpath, "w") as f: f.write("BAR") @@ -1508,7 +1508,7 @@ def test_hard_head(self): self.assertEqual([], changes) - def test_hard_commit(self): + def test_hard_commit(self) -> None: fullpath = os.path.join(self.repo.path, "foo") with open(fullpath, "w") as f: f.write("BAR") @@ -1545,7 +1545,7 @@ def test_hard_commit(self): class ResetFileTests(PorcelainTestCase): - def test_reset_modify_file_to_commit(self): + def test_reset_modify_file_to_commit(self) -> None: file = "foo" full_path = os.path.join(self.repo.path, file) @@ -1565,7 +1565,7 @@ def test_reset_modify_file_to_commit(self): with open(full_path) as f: self.assertEqual("hello", f.read()) - def test_reset_remove_file_to_commit(self): + def test_reset_remove_file_to_commit(self) -> None: file = "foo" full_path = os.path.join(self.repo.path, file) @@ -1584,7 +1584,7 @@ def test_reset_remove_file_to_commit(self): with open(full_path) as f: self.assertEqual("hello", f.read()) - def test_resetfile_with_dir(self): + def test_resetfile_with_dir(self) -> None: os.mkdir(os.path.join(self.repo.path, "new_dir")) full_path = os.path.join(self.repo.path, "new_dir", "foo") @@ -1628,19 +1628,19 @@ def _commit_file_with_content(repo, filename, content): class CheckoutTests(PorcelainTestCase): - def setUp(self): + def setUp(self) -> None: 
super().setUp() self._sha, self._foo_path = _commit_file_with_content( self.repo, "foo", "hello\n" ) porcelain.branch_create(self.repo, "uni") - def test_checkout_to_existing_branch(self): + def test_checkout_to_existing_branch(self) -> None: self.assertEqual(b"master", porcelain.active_branch(self.repo)) porcelain.checkout_branch(self.repo, b"uni") self.assertEqual(b"uni", porcelain.active_branch(self.repo)) - def test_checkout_to_non_existing_branch(self): + def test_checkout_to_non_existing_branch(self) -> None: self.assertEqual(b"master", porcelain.active_branch(self.repo)) with self.assertRaises(KeyError): @@ -1648,7 +1648,7 @@ def test_checkout_to_non_existing_branch(self): self.assertEqual(b"master", porcelain.active_branch(self.repo)) - def test_checkout_to_branch_with_modified_files(self): + def test_checkout_to_branch_with_modified_files(self) -> None: with open(self._foo_path, "a") as f: f.write("new message\n") porcelain.add(self.repo, paths=[self._foo_path]) @@ -1667,7 +1667,7 @@ def test_checkout_to_branch_with_modified_files(self): [{"add": [], "delete": [], "modify": [b"foo"]}, [], []], status ) - def test_checkout_with_deleted_files(self): + def test_checkout_with_deleted_files(self) -> None: porcelain.remove(self.repo.path, [os.path.join(self.repo.path, "foo")]) status = list(porcelain.status(self.repo)) self.assertEqual( @@ -1683,7 +1683,7 @@ def test_checkout_with_deleted_files(self): [{"add": [], "delete": [b"foo"], "modify": []}, [], []], status ) - def test_checkout_to_branch_with_added_files(self): + def test_checkout_to_branch_with_added_files(self) -> None: file_path = os.path.join(self.repo.path, "bar") with open(file_path, "w") as f: @@ -1703,7 +1703,7 @@ def test_checkout_to_branch_with_added_files(self): [{"add": [b"bar"], "delete": [], "modify": []}, [], []], status ) - def test_checkout_to_branch_with_modified_file_not_present(self): + def test_checkout_to_branch_with_modified_file_not_present(self) -> None: # Commit a new file that the other branch doesn't have. _, nee_path = _commit_file_with_content(self.repo, "nee", "Good content\n") @@ -1727,7 +1727,7 @@ def test_checkout_to_branch_with_modified_file_not_present(self): [{"add": [], "delete": [], "modify": [b"nee"]}, [], []], status ) - def test_checkout_to_branch_with_modified_file_not_present_forced(self): + def test_checkout_to_branch_with_modified_file_not_present_forced(self) -> None: # Commit a new file that the other branch doesn't have. _, nee_path = _commit_file_with_content(self.repo, "nee", "Good content\n") @@ -1748,7 +1748,7 @@ def test_checkout_to_branch_with_modified_file_not_present_forced(self): status = list(porcelain.status(self.repo)) self.assertEqual([{"add": [], "delete": [], "modify": []}, [], []], status) - def test_checkout_to_branch_with_unstaged_files(self): + def test_checkout_to_branch_with_unstaged_files(self) -> None: # Edit `foo`. 
with open(self._foo_path, "a") as f: f.write("new message") @@ -1765,7 +1765,7 @@ def test_checkout_to_branch_with_unstaged_files(self): [{"add": [], "delete": [], "modify": []}, [b"foo"], []], status ) - def test_checkout_to_branch_with_untracked_files(self): + def test_checkout_to_branch_with_untracked_files(self) -> None: with open(os.path.join(self.repo.path, "neu"), "a") as f: f.write("new message\n") @@ -1777,7 +1777,7 @@ def test_checkout_to_branch_with_untracked_files(self): status = list(porcelain.status(self.repo)) self.assertEqual([{"add": [], "delete": [], "modify": []}, [], ["neu"]], status) - def test_checkout_to_branch_with_new_files(self): + def test_checkout_to_branch_with_new_files(self) -> None: porcelain.checkout_branch(self.repo, b"uni") sub_directory = os.path.join(self.repo.path, "sub1") os.mkdir(sub_directory) @@ -1804,7 +1804,7 @@ def test_checkout_to_branch_with_new_files(self): status = list(porcelain.status(self.repo)) self.assertEqual([{"add": [], "delete": [], "modify": []}, [], []], status) - def test_checkout_to_branch_with_file_in_sub_directory(self): + def test_checkout_to_branch_with_file_in_sub_directory(self) -> None: sub_directory = os.path.join(self.repo.path, "sub1", "sub2") os.makedirs(sub_directory) @@ -1838,7 +1838,7 @@ def test_checkout_to_branch_with_file_in_sub_directory(self): self.assertTrue(os.path.isdir(sub_directory)) self.assertTrue(os.path.isdir(os.path.dirname(sub_directory))) - def test_checkout_to_branch_with_multiple_files_in_sub_directory(self): + def test_checkout_to_branch_with_multiple_files_in_sub_directory(self) -> None: sub_directory = os.path.join(self.repo.path, "sub1", "sub2") os.makedirs(sub_directory) @@ -1883,13 +1883,13 @@ def _commit_something_wrong(self): author=b"John ", ) - def test_checkout_to_commit_sha(self): + def test_checkout_to_commit_sha(self) -> None: self._commit_something_wrong() porcelain.checkout_branch(self.repo, self._sha) self.assertEqual(self._sha, self.repo.head()) - def test_checkout_to_head(self): + def test_checkout_to_head(self) -> None: new_sha = self._commit_something_wrong() porcelain.checkout_branch(self.repo, b"HEAD") @@ -1965,11 +1965,11 @@ def _checkout_remote_branch(self): return target_repo - def test_checkout_remote_branch(self): + def test_checkout_remote_branch(self) -> None: repo = self._checkout_remote_branch() repo.close() - def test_checkout_remote_branch_then_master_then_remote_branch_again(self): + def test_checkout_remote_branch_then_master_then_remote_branch_again(self) -> None: target_repo = self._checkout_remote_branch() self.assertEqual(b"foo", porcelain.active_branch(target_repo)) _commit_file_with_content(target_repo, "bar", "something\n") @@ -1989,7 +1989,7 @@ def test_checkout_remote_branch_then_master_then_remote_branch_again(self): class SubmoduleTests(PorcelainTestCase): - def test_empty(self): + def test_empty(self) -> None: porcelain.commit( repo=self.repo.path, message=b"init", @@ -1999,7 +1999,7 @@ def test_empty(self): self.assertEqual([], list(porcelain.submodule_list(self.repo))) - def test_add(self): + def test_add(self) -> None: porcelain.submodule_add(self.repo, "../bar.git", "bar") with open(f"{self.repo.path}/.gitmodules") as f: self.assertEqual( @@ -2011,13 +2011,13 @@ def test_add(self): f.read(), ) - def test_init(self): + def test_init(self) -> None: porcelain.submodule_add(self.repo, "../bar.git", "bar") porcelain.submodule_init(self.repo) class PushTests(PorcelainTestCase): - def test_simple(self): + def test_simple(self) -> None: """Basic test of 
porcelain push where self.repo is the remote. First clone the remote, commit a file to the clone, then push the changes back to the remote. @@ -2101,7 +2101,7 @@ def test_simple(self): os.path.basename(fullpath), change.new.path.decode("ascii") ) - def test_local_missing(self): + def test_local_missing(self) -> None: """Pushing a new branch.""" outstream = BytesIO() errstream = BytesIO() @@ -2122,7 +2122,7 @@ def test_local_missing(self): errstream=errstream, ) - def test_new(self): + def test_new(self) -> None: """Pushing a new branch.""" outstream = BytesIO() errstream = BytesIO() @@ -2162,7 +2162,7 @@ def test_new(self): r_clone.get_refs(), ) - def test_delete(self): + def test_delete(self) -> None: """Basic test of porcelain push, removing a branch.""" outstream = BytesIO() errstream = BytesIO() @@ -2205,7 +2205,7 @@ def test_delete(self): self.repo.get_refs(), ) - def test_diverged(self): + def test_diverged(self) -> None: outstream = BytesIO() errstream = BytesIO() @@ -2289,7 +2289,7 @@ def test_diverged(self): class PullTests(PorcelainTestCase): - def setUp(self): + def setUp(self) -> None: super().setUp() # create a file for initial commit handle, fullpath = tempfile.mkstemp(dir=self.repo.path) @@ -2324,7 +2324,7 @@ def setUp(self): self.assertIn(b"refs/heads/master", self.repo.refs) self.assertIn(b"refs/heads/master", target_repo.refs) - def test_simple(self): + def test_simple(self) -> None: outstream = BytesIO() errstream = BytesIO() @@ -2341,7 +2341,7 @@ def test_simple(self): with Repo(self.target_path) as r: self.assertEqual(r[b"HEAD"].id, self.repo[b"HEAD"].id) - def test_diverged(self): + def test_diverged(self) -> None: outstream = BytesIO() errstream = BytesIO() @@ -2389,7 +2389,7 @@ def test_diverged(self): with Repo(self.target_path) as r: self.assertEqual(r[b"refs/heads/master"].id, c3a) - def test_no_refspec(self): + def test_no_refspec(self) -> None: outstream = BytesIO() errstream = BytesIO() @@ -2405,7 +2405,7 @@ def test_no_refspec(self): with Repo(self.target_path) as r: self.assertEqual(r[b"HEAD"].id, self.repo[b"HEAD"].id) - def test_no_remote_location(self): + def test_no_remote_location(self) -> None: outstream = BytesIO() errstream = BytesIO() @@ -2423,12 +2423,12 @@ def test_no_remote_location(self): class StatusTests(PorcelainTestCase): - def test_empty(self): + def test_empty(self) -> None: results = porcelain.status(self.repo) self.assertEqual({"add": [], "delete": [], "modify": []}, results.staged) self.assertEqual([], results.unstaged) - def test_status_base(self): + def test_status_base(self) -> None: """Integration test for `status` functionality.""" # Commit a dummy file then modify it fullpath = os.path.join(self.repo.path, "foo") @@ -2461,7 +2461,7 @@ def test_status_base(self): self.assertEqual(results.staged["add"][0], filename_add.encode("ascii")) self.assertEqual(results.unstaged, [b"foo"]) - def test_status_all(self): + def test_status_all(self) -> None: del_path = os.path.join(self.repo.path, "foo") mod_path = os.path.join(self.repo.path, "bar") add_path = os.path.join(self.repo.path, "baz") @@ -2501,11 +2501,11 @@ def test_status_all(self): results_no_untracked = porcelain.status(self.repo.path, untracked_files="no") self.assertListEqual(results_no_untracked.untracked, []) - def test_status_wrong_untracked_files_value(self): + def test_status_wrong_untracked_files_value(self) -> None: with self.assertRaises(ValueError): porcelain.status(self.repo.path, untracked_files="antani") - def test_status_untracked_path(self): + def 
test_status_untracked_path(self) -> None: untracked_dir = os.path.join(self.repo_path, "untracked_dir") os.mkdir(untracked_dir) untracked_file = os.path.join(untracked_dir, "untracked_file") @@ -2515,7 +2515,7 @@ def test_status_untracked_path(self): _, _, untracked = porcelain.status(self.repo.path, untracked_files="all") self.assertEqual(untracked, ["untracked_dir/untracked_file"]) - def test_status_crlf_mismatch(self): + def test_status_crlf_mismatch(self) -> None: # First make a commit as if the file has been added on a Linux system # or with core.autocrlf=True file_path = os.path.join(self.repo.path, "crlf") @@ -2539,7 +2539,7 @@ def test_status_crlf_mismatch(self): self.assertListEqual(results.unstaged, [b"crlf"]) self.assertListEqual(results.untracked, []) - def test_status_autocrlf_true(self): + def test_status_autocrlf_true(self) -> None: # First make a commit as if the file has been added on a Linux system # or with core.autocrlf=True file_path = os.path.join(self.repo.path, "crlf") @@ -2568,7 +2568,7 @@ def test_status_autocrlf_true(self): self.assertListEqual(results.unstaged, []) self.assertListEqual(results.untracked, []) - def test_status_autocrlf_input(self): + def test_status_autocrlf_input(self) -> None: # Commit existing file with CRLF file_path = os.path.join(self.repo.path, "crlf-exists") with open(file_path, "wb") as f: @@ -2598,7 +2598,7 @@ def test_status_autocrlf_input(self): self.assertListEqual(results.unstaged, []) self.assertListEqual(results.untracked, []) - def test_get_tree_changes_add(self): + def test_get_tree_changes_add(self) -> None: """Unit test for get_tree_changes add.""" # Make a dummy file, stage filename = "bar" @@ -2625,7 +2625,7 @@ def test_get_tree_changes_add(self): self.assertEqual(len(changes["modify"]), 0) self.assertEqual(len(changes["delete"]), 0) - def test_get_tree_changes_modify(self): + def test_get_tree_changes_modify(self) -> None: """Unit test for get_tree_changes modify.""" # Make a dummy file, stage, commit, modify filename = "foo" @@ -2649,7 +2649,7 @@ def test_get_tree_changes_modify(self): self.assertEqual(len(changes["modify"]), 1) self.assertEqual(len(changes["delete"]), 0) - def test_get_tree_changes_delete(self): + def test_get_tree_changes_delete(self) -> None: """Unit test for get_tree_changes delete.""" # Make a dummy file, stage, commit, remove filename = "foo" @@ -2676,7 +2676,7 @@ def test_get_tree_changes_delete(self): self.assertEqual(len(changes["modify"]), 0) self.assertEqual(len(changes["delete"]), 1) - def test_get_untracked_paths(self): + def test_get_untracked_paths(self) -> None: with open(os.path.join(self.repo.path, ".gitignore"), "w") as f: f.write("ignored\n") with open(os.path.join(self.repo.path, "ignored"), "w") as f: @@ -2704,7 +2704,7 @@ def test_get_untracked_paths(self): set(porcelain.status(self.repo, ignored=True).untracked), ) - def test_get_untracked_paths_subrepo(self): + def test_get_untracked_paths_subrepo(self) -> None: with open(os.path.join(self.repo.path, ".gitignore"), "w") as f: f.write("nested/\n") with open(os.path.join(self.repo.path, "notignored"), "w") as f: @@ -2770,7 +2770,7 @@ def test_get_untracked_paths_subrepo(self): ), ) - def test_get_untracked_paths_subdir(self): + def test_get_untracked_paths_subdir(self) -> None: with open(os.path.join(self.repo.path, ".gitignore"), "w") as f: f.write("subdir/\nignored") with open(os.path.join(self.repo.path, "notignored"), "w") as f: @@ -2808,7 +2808,7 @@ def test_get_untracked_paths_subdir(self): ), ) - def 
test_get_untracked_paths_invalid_untracked_files(self): + def test_get_untracked_paths_invalid_untracked_files(self) -> None: with self.assertRaises(ValueError): list( porcelain.get_untracked_paths( @@ -2819,7 +2819,7 @@ def test_get_untracked_paths_invalid_untracked_files(self): ) ) - def test_get_untracked_paths_normal(self): + def test_get_untracked_paths_normal(self) -> None: with self.assertRaises(NotImplementedError): _, _, _ = porcelain.status(repo=self.repo.path, untracked_files="normal") @@ -2830,7 +2830,7 @@ def test_get_untracked_paths_normal(self): class UploadPackTests(PorcelainTestCase): """Tests for upload_pack.""" - def test_upload_pack(self): + def test_upload_pack(self) -> None: outf = BytesIO() exitcode = porcelain.upload_pack(self.repo.path, BytesIO(b"0000"), outf) outlines = outf.getvalue().splitlines() @@ -2841,7 +2841,7 @@ def test_upload_pack(self): class ReceivePackTests(PorcelainTestCase): """Tests for receive_pack.""" - def test_receive_pack(self): + def test_receive_pack(self) -> None: filename = "foo" fullpath = os.path.join(self.repo.path, filename) with open(fullpath, "w") as f: @@ -2873,10 +2873,10 @@ def test_receive_pack(self): class BranchListTests(PorcelainTestCase): - def test_standard(self): + def test_standard(self) -> None: self.assertEqual(set(), set(porcelain.branch_list(self.repo))) - def test_new_branch(self): + def test_new_branch(self) -> None: [c1] = build_commit_graph(self.repo.object_store, [[1]]) self.repo[b"HEAD"] = c1.id porcelain.branch_create(self.repo, b"foo") @@ -2884,14 +2884,14 @@ def test_new_branch(self): class BranchCreateTests(PorcelainTestCase): - def test_branch_exists(self): + def test_branch_exists(self) -> None: [c1] = build_commit_graph(self.repo.object_store, [[1]]) self.repo[b"HEAD"] = c1.id porcelain.branch_create(self.repo, b"foo") self.assertRaises(porcelain.Error, porcelain.branch_create, self.repo, b"foo") porcelain.branch_create(self.repo, b"foo", force=True) - def test_new_branch(self): + def test_new_branch(self) -> None: [c1] = build_commit_graph(self.repo.object_store, [[1]]) self.repo[b"HEAD"] = c1.id porcelain.branch_create(self.repo, b"foo") @@ -2899,7 +2899,7 @@ def test_new_branch(self): class BranchDeleteTests(PorcelainTestCase): - def test_simple(self): + def test_simple(self) -> None: [c1] = build_commit_graph(self.repo.object_store, [[1]]) self.repo[b"HEAD"] = c1.id porcelain.branch_create(self.repo, b"foo") @@ -2907,7 +2907,7 @@ def test_simple(self): porcelain.branch_delete(self.repo, b"foo") self.assertNotIn(b"foo", porcelain.branch_list(self.repo)) - def test_simple_unicode(self): + def test_simple_unicode(self) -> None: [c1] = build_commit_graph(self.repo.object_store, [[1]]) self.repo[b"HEAD"] = c1.id porcelain.branch_create(self.repo, "foo") @@ -2917,7 +2917,7 @@ def test_simple_unicode(self): class FetchTests(PorcelainTestCase): - def test_simple(self): + def test_simple(self) -> None: outstream = BytesIO() errstream = BytesIO() @@ -2963,7 +2963,7 @@ def test_simple(self): with Repo(target_path) as r: self.assertIn(self.repo[b"HEAD"].id, r) - def test_with_remote_name(self): + def test_with_remote_name(self) -> None: remote_name = "origin" outstream = BytesIO() errstream = BytesIO() @@ -3024,7 +3024,7 @@ def test_with_remote_name(self): def assert_correct_remote_refs( self, local_refs, remote_refs, remote_name=b"origin" - ): + ) -> None: """Assert that known remote refs corresponds to actual remote refs.""" local_ref_prefix = b"refs/heads" remote_ref_prefix = b"refs/remotes/" + remote_name @@ 
-3047,10 +3047,10 @@ def assert_correct_remote_refs( class RepackTests(PorcelainTestCase): - def test_empty(self): + def test_empty(self) -> None: porcelain.repack(self.repo) - def test_simple(self): + def test_simple(self) -> None: handle, fullpath = tempfile.mkstemp(dir=self.repo.path) os.close(handle) porcelain.add(repo=self.repo.path, paths=fullpath) @@ -3058,7 +3058,7 @@ def test_simple(self): class LsTreeTests(PorcelainTestCase): - def test_empty(self): + def test_empty(self) -> None: porcelain.commit( repo=self.repo.path, message=b"test status", @@ -3070,7 +3070,7 @@ def test_empty(self): porcelain.ls_tree(self.repo, b"HEAD", outstream=f) self.assertEqual(f.getvalue(), "") - def test_simple(self): + def test_simple(self) -> None: # Commit a dummy file then modify it fullpath = os.path.join(self.repo.path, "foo") with open(fullpath, "w") as f: @@ -3091,7 +3091,7 @@ def test_simple(self): "100644 blob 8b82634d7eae019850bb883f06abf428c58bc9aa\tfoo\n", ) - def test_recursive(self): + def test_recursive(self) -> None: # Create a directory then write a dummy file in it dirpath = os.path.join(self.repo.path, "adir") filepath = os.path.join(dirpath, "afile") @@ -3122,10 +3122,10 @@ def test_recursive(self): class LsRemoteTests(PorcelainTestCase): - def test_empty(self): + def test_empty(self) -> None: self.assertEqual({}, porcelain.ls_remote(self.repo.path)) - def test_some(self): + def test_some(self) -> None: cid = porcelain.commit( repo=self.repo.path, message=b"test status", @@ -3140,10 +3140,10 @@ def test_some(self): class LsFilesTests(PorcelainTestCase): - def test_empty(self): + def test_empty(self) -> None: self.assertEqual([], list(porcelain.ls_files(self.repo))) - def test_simple(self): + def test_simple(self) -> None: # Commit a dummy file then modify it fullpath = os.path.join(self.repo.path, "foo") with open(fullpath, "w") as f: @@ -3154,7 +3154,7 @@ def test_simple(self): class RemoteAddTests(PorcelainTestCase): - def test_new(self): + def test_new(self) -> None: porcelain.remote_add(self.repo, "jelmer", "git://jelmer.uk/code/dulwich") c = self.repo.get_config() self.assertEqual( @@ -3162,7 +3162,7 @@ def test_new(self): b"git://jelmer.uk/code/dulwich", ) - def test_exists(self): + def test_exists(self) -> None: porcelain.remote_add(self.repo, "jelmer", "git://jelmer.uk/code/dulwich") self.assertRaises( porcelain.RemoteExists, @@ -3174,7 +3174,7 @@ def test_exists(self): class RemoteRemoveTests(PorcelainTestCase): - def test_remove(self): + def test_remove(self) -> None: porcelain.remote_add(self.repo, "jelmer", "git://jelmer.uk/code/dulwich") c = self.repo.get_config() self.assertEqual( @@ -3188,7 +3188,7 @@ def test_remove(self): class CheckIgnoreTests(PorcelainTestCase): - def test_check_ignored(self): + def test_check_ignored(self) -> None: with open(os.path.join(self.repo.path, ".gitignore"), "w") as f: f.write("foo") foo_path = os.path.join(self.repo.path, "foo") @@ -3200,7 +3200,7 @@ def test_check_ignored(self): self.assertEqual(["foo"], list(porcelain.check_ignore(self.repo, [foo_path]))) self.assertEqual([], list(porcelain.check_ignore(self.repo, [bar_path]))) - def test_check_added_abs(self): + def test_check_added_abs(self) -> None: path = os.path.join(self.repo.path, "foo") with open(path, "w") as f: f.write("BAR") @@ -3213,7 +3213,7 @@ def test_check_added_abs(self): list(porcelain.check_ignore(self.repo, [path], no_index=True)), ) - def test_check_added_rel(self): + def test_check_added_rel(self) -> None: with open(os.path.join(self.repo.path, "foo"), "w") as 
f: f.write("BAR") self.repo.stage(["foo"]) @@ -3233,28 +3233,28 @@ def test_check_added_rel(self): class UpdateHeadTests(PorcelainTestCase): - def test_set_to_branch(self): + def test_set_to_branch(self) -> None: [c1] = build_commit_graph(self.repo.object_store, [[1]]) self.repo.refs[b"refs/heads/blah"] = c1.id porcelain.update_head(self.repo, "blah") self.assertEqual(c1.id, self.repo.head()) self.assertEqual(b"ref: refs/heads/blah", self.repo.refs.read_ref(b"HEAD")) - def test_set_to_branch_detached(self): + def test_set_to_branch_detached(self) -> None: [c1] = build_commit_graph(self.repo.object_store, [[1]]) self.repo.refs[b"refs/heads/blah"] = c1.id porcelain.update_head(self.repo, "blah", detached=True) self.assertEqual(c1.id, self.repo.head()) self.assertEqual(c1.id, self.repo.refs.read_ref(b"HEAD")) - def test_set_to_commit_detached(self): + def test_set_to_commit_detached(self) -> None: [c1] = build_commit_graph(self.repo.object_store, [[1]]) self.repo.refs[b"refs/heads/blah"] = c1.id porcelain.update_head(self.repo, c1.id, detached=True) self.assertEqual(c1.id, self.repo.head()) self.assertEqual(c1.id, self.repo.refs.read_ref(b"HEAD")) - def test_set_new_branch(self): + def test_set_new_branch(self) -> None: [c1] = build_commit_graph(self.repo.object_store, [[1]]) self.repo.refs[b"refs/heads/blah"] = c1.id porcelain.update_head(self.repo, "blah", new_branch="bar") @@ -3263,13 +3263,13 @@ def test_set_new_branch(self): class MailmapTests(PorcelainTestCase): - def test_no_mailmap(self): + def test_no_mailmap(self) -> None: self.assertEqual( b"Jelmer Vernooij ", porcelain.check_mailmap(self.repo, b"Jelmer Vernooij "), ) - def test_mailmap_lookup(self): + def test_mailmap_lookup(self) -> None: with open(os.path.join(self.repo.path, ".mailmap"), "wb") as f: f.write( b"""\ @@ -3283,10 +3283,10 @@ def test_mailmap_lookup(self): class FsckTests(PorcelainTestCase): - def test_none(self): + def test_none(self) -> None: self.assertEqual([], list(porcelain.fsck(self.repo))) - def test_git_dir(self): + def test_git_dir(self) -> None: obj = Tree() a = Blob() a.data = b"foo" @@ -3299,10 +3299,10 @@ def test_git_dir(self): class DescribeTests(PorcelainTestCase): - def test_no_commits(self): + def test_no_commits(self) -> None: self.assertRaises(KeyError, porcelain.describe, self.repo.path) - def test_single_commit(self): + def test_single_commit(self) -> None: fullpath = os.path.join(self.repo.path, "foo") with open(fullpath, "w") as f: f.write("BAR") @@ -3318,7 +3318,7 @@ def test_single_commit(self): porcelain.describe(self.repo.path), ) - def test_tag(self): + def test_tag(self) -> None: fullpath = os.path.join(self.repo.path, "foo") with open(fullpath, "w") as f: f.write("BAR") @@ -3338,7 +3338,7 @@ def test_tag(self): ) self.assertEqual("tryme", porcelain.describe(self.repo.path)) - def test_tag_and_commit(self): + def test_tag_and_commit(self) -> None: fullpath = os.path.join(self.repo.path, "foo") with open(fullpath, "w") as f: f.write("BAR") @@ -3370,7 +3370,7 @@ def test_tag_and_commit(self): porcelain.describe(self.repo.path), ) - def test_tag_and_commit_full(self): + def test_tag_and_commit_full(self) -> None: fullpath = os.path.join(self.repo.path, "foo") with open(fullpath, "w") as f: f.write("BAR") @@ -3404,7 +3404,7 @@ def test_tag_and_commit_full(self): class PathToTreeTests(PorcelainTestCase): - def setUp(self): + def setUp(self) -> None: super().setUp() self.fp = os.path.join(self.test_dir, "bar") with open(self.fp, "w") as f: @@ -3413,7 +3413,7 @@ def setUp(self): 
self.addCleanup(os.chdir, oldcwd) os.chdir(self.test_dir) - def test_path_to_tree_path_base(self): + def test_path_to_tree_path_base(self) -> None: self.assertEqual(b"bar", porcelain.path_to_tree_path(self.test_dir, self.fp)) self.assertEqual(b"bar", porcelain.path_to_tree_path(".", "./bar")) self.assertEqual(b"bar", porcelain.path_to_tree_path(".", "bar")) @@ -3423,15 +3423,15 @@ def test_path_to_tree_path_base(self): ) self.assertEqual(b"bar", porcelain.path_to_tree_path(cwd, "bar")) - def test_path_to_tree_path_syntax(self): + def test_path_to_tree_path_syntax(self) -> None: self.assertEqual(b"bar", porcelain.path_to_tree_path(".", "./bar")) - def test_path_to_tree_path_error(self): + def test_path_to_tree_path_error(self) -> None: with self.assertRaises(ValueError): with tempfile.TemporaryDirectory() as od: porcelain.path_to_tree_path(od, self.fp) - def test_path_to_tree_path_rel(self): + def test_path_to_tree_path_rel(self) -> None: cwd = os.getcwd() os.mkdir(os.path.join(self.repo.path, "foo")) os.mkdir(os.path.join(self.repo.path, "foo/bar")) @@ -3460,7 +3460,7 @@ def test_path_to_tree_path_rel(self): class GetObjectByPathTests(PorcelainTestCase): - def test_simple(self): + def test_simple(self) -> None: fullpath = os.path.join(self.repo.path, "foo") with open(fullpath, "w") as f: f.write("BAR") @@ -3474,7 +3474,7 @@ def test_simple(self): self.assertEqual(b"BAR", porcelain.get_object_by_path(self.repo, "foo").data) self.assertEqual(b"BAR", porcelain.get_object_by_path(self.repo, b"foo").data) - def test_encoding(self): + def test_encoding(self) -> None: fullpath = os.path.join(self.repo.path, "foo") with open(fullpath, "w") as f: f.write("BAR") @@ -3489,12 +3489,12 @@ def test_encoding(self): self.assertEqual(b"BAR", porcelain.get_object_by_path(self.repo, "foo").data) self.assertEqual(b"BAR", porcelain.get_object_by_path(self.repo, b"foo").data) - def test_missing(self): + def test_missing(self) -> None: self.assertRaises(KeyError, porcelain.get_object_by_path, self.repo, "foo") class WriteTreeTests(PorcelainTestCase): - def test_simple(self): + def test_simple(self) -> None: fullpath = os.path.join(self.repo.path, "foo") with open(fullpath, "w") as f: f.write("BAR") @@ -3506,12 +3506,12 @@ def test_simple(self): class ActiveBranchTests(PorcelainTestCase): - def test_simple(self): + def test_simple(self) -> None: self.assertEqual(b"master", porcelain.active_branch(self.repo)) class FindUniqueAbbrevTests(PorcelainTestCase): - def test_simple(self): + def test_simple(self) -> None: c1, c2, c3 = build_commit_graph( self.repo.object_store, [[1], [2, 1], [3, 1, 2]] ) @@ -3523,7 +3523,7 @@ def test_simple(self): class PackRefsTests(PorcelainTestCase): - def test_all(self): + def test_all(self) -> None: c1, c2, c3 = build_commit_graph( self.repo.object_store, [[1], [2, 1], [3, 1, 2]] ) @@ -3541,7 +3541,7 @@ def test_all(self): }, ) - def test_not_all(self): + def test_not_all(self) -> None: c1, c2, c3 = build_commit_graph( self.repo.object_store, [[1], [2, 1], [3, 1, 2]] ) @@ -3573,7 +3573,7 @@ def _serving(self): server.shutdown() thread.join(10) - def setUp(self): + def setUp(self) -> None: super().setUp() self.served_repo_path = os.path.join(self.test_dir, "served_repo.git") @@ -3583,14 +3583,14 @@ def setUp(self): backend = DictBackend({"/": self.served_repo}) self.app = make_wsgi_chain(backend) - def test_pull(self): + def test_pull(self) -> None: (c1,) = build_commit_graph(self.served_repo.object_store, [[1]]) self.served_repo.refs[b"refs/heads/master"] = c1.id with self._serving() 
as url: porcelain.pull(self.repo, url, "master") - def test_push(self): + def test_push(self) -> None: (c1,) = build_commit_graph(self.repo.object_store, [[1]]) self.repo.refs[b"refs/heads/master"] = c1.id @@ -3599,7 +3599,7 @@ def test_push(self): class ForEachTests(PorcelainTestCase): - def setUp(self): + def setUp(self) -> None: super().setUp() c1, c2, c3, c4 = build_commit_graph( self.repo.object_store, [[1], [2, 1], [3, 1, 2], [4]] @@ -3631,7 +3631,7 @@ def setUp(self): ) self.repo.refs[b"HEAD"] = c4.id - def test_for_each_ref(self): + def test_for_each_ref(self) -> None: refs = porcelain.for_each_ref(self.repo) self.assertEqual( @@ -3646,7 +3646,7 @@ def test_for_each_ref(self): ], ) - def test_for_each_ref_pattern(self): + def test_for_each_ref_pattern(self) -> None: versions = porcelain.for_each_ref(self.repo, pattern="refs/tags/v*") self.assertEqual( [(object_type, tag) for _, object_type, tag in versions], diff --git a/tests/test_protocol.py b/tests/test_protocol.py index c374eb418..c76a4be3a 100644 --- a/tests/test_protocol.py +++ b/tests/test_protocol.py @@ -44,17 +44,17 @@ class PktLineTests(TestCase): - def test_pkt_line(self): + def test_pkt_line(self) -> None: self.assertEqual(b"0007bla", pkt_line(b"bla")) self.assertEqual(b"0000", pkt_line(None)) - def test_pkt_seq(self): + def test_pkt_seq(self) -> None: self.assertEqual(b"0007bla0007foo0000", pkt_seq(b"bla", b"foo")) self.assertEqual(b"0000", pkt_seq()) class FilterRefPrefixTests(TestCase): - def test_filter_ref_prefix(self): + def test_filter_ref_prefix(self) -> None: self.assertEqual( {b"refs/heads/foo": b"0123456789", b"refs/heads/bar": b"0123456789"}, filter_ref_prefix( @@ -69,20 +69,20 @@ def test_filter_ref_prefix(self): class BaseProtocolTests: - def test_write_pkt_line_none(self): + def test_write_pkt_line_none(self) -> None: self.proto.write_pkt_line(None) self.assertEqual(self.rout.getvalue(), b"0000") - def test_write_pkt_line(self): + def test_write_pkt_line(self) -> None: self.proto.write_pkt_line(b"bla") self.assertEqual(self.rout.getvalue(), b"0007bla") - def test_read_pkt_line(self): + def test_read_pkt_line(self) -> None: self.rin.write(b"0008cmd ") self.rin.seek(0) self.assertEqual(b"cmd ", self.proto.read_pkt_line()) - def test_eof(self): + def test_eof(self) -> None: self.rin.write(b"0000") self.rin.seek(0) self.assertFalse(self.proto.eof()) @@ -90,7 +90,7 @@ def test_eof(self): self.assertTrue(self.proto.eof()) self.assertRaises(HangupException, self.proto.read_pkt_line) - def test_unread_pkt_line(self): + def test_unread_pkt_line(self) -> None: self.rin.write(b"0007foo0000") self.rin.seek(0) self.assertEqual(b"foo", self.proto.read_pkt_line()) @@ -100,42 +100,42 @@ def test_unread_pkt_line(self): self.proto.unread_pkt_line(b"baz1") self.assertRaises(ValueError, self.proto.unread_pkt_line, b"baz2") - def test_read_pkt_seq(self): + def test_read_pkt_seq(self) -> None: self.rin.write(b"0008cmd 0005l0000") self.rin.seek(0) self.assertEqual([b"cmd ", b"l"], list(self.proto.read_pkt_seq())) - def test_read_pkt_line_none(self): + def test_read_pkt_line_none(self) -> None: self.rin.write(b"0000") self.rin.seek(0) self.assertEqual(None, self.proto.read_pkt_line()) - def test_read_pkt_line_wrong_size(self): + def test_read_pkt_line_wrong_size(self) -> None: self.rin.write(b"0100too short") self.rin.seek(0) self.assertRaises(GitProtocolError, self.proto.read_pkt_line) - def test_write_sideband(self): + def test_write_sideband(self) -> None: self.proto.write_sideband(3, b"bloe") 
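[Editor's aside, not part of the patch] A minimal sketch of the pkt-line framing these protocol tests assert against: the four-digit hex prefix counts itself plus the payload, b"0000" is the flush packet, and a sideband frame simply prepends one band byte to the payload before framing. The helper name pkt_line_sketch is illustrative only and is not dulwich's pkt_line.

from typing import Optional


def pkt_line_sketch(data: Optional[bytes]) -> bytes:
    # None encodes the flush packet; otherwise prefix = len(payload) + 4, in hex.
    if data is None:
        return b"0000"
    return ("%04x" % (len(data) + 4)).encode("ascii") + data


assert pkt_line_sketch(b"bla") == b"0007bla"   # the value checked in test_pkt_line
assert pkt_line_sketch(None) == b"0000"
# A band-3 sideband frame wrapping b"bloe", as test_write_sideband expects.
assert pkt_line_sketch(b"\x03" + b"bloe") == b"0009\x03bloe"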
self.assertEqual(self.rout.getvalue(), b"0009\x03bloe") - def test_send_cmd(self): + def test_send_cmd(self) -> None: self.proto.send_cmd(b"fetch", b"a", b"b") self.assertEqual(self.rout.getvalue(), b"000efetch a\x00b\x00") - def test_read_cmd(self): + def test_read_cmd(self) -> None: self.rin.write(b"0012cmd arg1\x00arg2\x00") self.rin.seek(0) self.assertEqual((b"cmd", [b"arg1", b"arg2"]), self.proto.read_cmd()) - def test_read_cmd_noend0(self): + def test_read_cmd_noend0(self) -> None: self.rin.write(b"0011cmd arg1\x00arg2") self.rin.seek(0) self.assertRaises(AssertionError, self.proto.read_cmd) class ProtocolTests(BaseProtocolTests, TestCase): - def setUp(self): + def setUp(self) -> None: TestCase.setUp(self) self.rout = BytesIO() self.rin = BytesIO() @@ -161,21 +161,21 @@ def recv(self, size): class ReceivableProtocolTests(BaseProtocolTests, TestCase): - def setUp(self): + def setUp(self) -> None: TestCase.setUp(self) self.rout = BytesIO() self.rin = ReceivableBytesIO() self.proto = ReceivableProtocol(self.rin.recv, self.rout.write) self.proto._rbufsize = 8 - def test_eof(self): + def test_eof(self) -> None: # Allow blocking reads past EOF just for this test. The only parts of # the protocol that might check for EOF do not depend on the recv() # semantics anyway. self.rin.allow_read_past_eof = True BaseProtocolTests.test_eof(self) - def test_recv(self): + def test_recv(self) -> None: all_data = b"1234567" * 10 # not a multiple of bufsize self.rin.write(all_data) self.rin.seek(0) @@ -188,7 +188,7 @@ def test_recv(self): self.assertRaises(GitProtocolError, self.proto.recv, 10) self.assertEqual(all_data, data) - def test_recv_read(self): + def test_recv_read(self) -> None: all_data = b"1234567" # recv exactly in one call self.rin.write(all_data) self.rin.seek(0) @@ -196,7 +196,7 @@ def test_recv_read(self): self.assertEqual(b"567", self.proto.read(3)) self.assertRaises(GitProtocolError, self.proto.recv, 10) - def test_read_recv(self): + def test_read_recv(self) -> None: all_data = b"12345678abcdefg" self.rin.write(all_data) self.rin.seek(0) @@ -205,7 +205,7 @@ def test_read_recv(self): self.assertEqual(b"defg", self.proto.read(4)) self.assertRaises(GitProtocolError, self.proto.recv, 10) - def test_mixed(self): + def test_mixed(self) -> None: # arbitrary non-repeating string all_data = b",".join(str(i).encode("ascii") for i in range(100)) self.rin.write(all_data) @@ -231,18 +231,18 @@ def test_mixed(self): class CapabilitiesTestCase(TestCase): - def test_plain(self): + def test_plain(self) -> None: self.assertEqual((b"bla", []), extract_capabilities(b"bla")) - def test_caps(self): + def test_caps(self) -> None: self.assertEqual((b"bla", [b"la"]), extract_capabilities(b"bla\0la")) self.assertEqual((b"bla", [b"la"]), extract_capabilities(b"bla\0la\n")) self.assertEqual((b"bla", [b"la", b"la"]), extract_capabilities(b"bla\0la la")) - def test_plain_want_line(self): + def test_plain_want_line(self) -> None: self.assertEqual((b"want bla", []), extract_want_line_capabilities(b"want bla")) - def test_caps_want_line(self): + def test_caps_want_line(self) -> None: self.assertEqual( (b"want bla", [b"la"]), extract_want_line_capabilities(b"want bla la"), @@ -256,7 +256,7 @@ def test_caps_want_line(self): extract_want_line_capabilities(b"want bla la la"), ) - def test_ack_type(self): + def test_ack_type(self) -> None: self.assertEqual(SINGLE_ACK, ack_type([b"foo", b"bar"])) self.assertEqual(MULTI_ACK, ack_type([b"foo", b"bar", b"multi_ack"])) self.assertEqual( @@ -271,42 +271,42 @@ def 
test_ack_type(self): class BufferedPktLineWriterTests(TestCase): - def setUp(self): + def setUp(self) -> None: TestCase.setUp(self) self._output = BytesIO() self._writer = BufferedPktLineWriter(self._output.write, bufsize=16) - def assertOutputEquals(self, expected): + def assertOutputEquals(self, expected) -> None: self.assertEqual(expected, self._output.getvalue()) - def _truncate(self): + def _truncate(self) -> None: self._output.seek(0) self._output.truncate() - def test_write(self): + def test_write(self) -> None: self._writer.write(b"foo") self.assertOutputEquals(b"") self._writer.flush() self.assertOutputEquals(b"0007foo") - def test_write_none(self): + def test_write_none(self) -> None: self._writer.write(None) self.assertOutputEquals(b"") self._writer.flush() self.assertOutputEquals(b"0000") - def test_flush_empty(self): + def test_flush_empty(self) -> None: self._writer.flush() self.assertOutputEquals(b"") - def test_write_multiple(self): + def test_write_multiple(self) -> None: self._writer.write(b"foo") self._writer.write(b"bar") self.assertOutputEquals(b"") self._writer.flush() self.assertOutputEquals(b"0007foo0007bar") - def test_write_across_boundary(self): + def test_write_across_boundary(self) -> None: self._writer.write(b"foo") self._writer.write(b"barbaz") self.assertOutputEquals(b"0007foo000abarba") @@ -314,7 +314,7 @@ def test_write_across_boundary(self): self._writer.flush() self.assertOutputEquals(b"z") - def test_write_to_boundary(self): + def test_write_to_boundary(self) -> None: self._writer.write(b"foo") self._writer.write(b"barba") self.assertOutputEquals(b"0007foo0009barba") @@ -325,14 +325,14 @@ def test_write_to_boundary(self): class PktLineParserTests(TestCase): - def test_none(self): + def test_none(self) -> None: pktlines = [] parser = PktLineParser(pktlines.append) parser.parse(b"0000") self.assertEqual(pktlines, [None]) self.assertEqual(b"", parser.get_tail()) - def test_small_fragments(self): + def test_small_fragments(self) -> None: pktlines = [] parser = PktLineParser(pktlines.append) parser.parse(b"00") @@ -341,7 +341,7 @@ def test_small_fragments(self): self.assertEqual(pktlines, [b"z", None]) self.assertEqual(b"", parser.get_tail()) - def test_multiple_packets(self): + def test_multiple_packets(self) -> None: pktlines = [] parser = PktLineParser(pktlines.append) parser.parse(b"0005z0006aba") diff --git a/tests/test_reflog.py b/tests/test_reflog.py index a8b4cf92e..9aa48dee0 100644 --- a/tests/test_reflog.py +++ b/tests/test_reflog.py @@ -34,7 +34,7 @@ class ReflogLineTests(TestCase): - def test_format(self): + def test_format(self) -> None: self.assertEqual( b"0000000000000000000000000000000000000000 " b"49030649db3dfec5a9bc03e5dde4255a14499f16 Jelmer Vernooij " @@ -65,7 +65,7 @@ def test_format(self): ), ) - def test_parse(self): + def test_parse(self) -> None: reflog_line = ( b"0000000000000000000000000000000000000000 " b"49030649db3dfec5a9bc03e5dde4255a14499f16 Jelmer Vernooij " @@ -102,7 +102,7 @@ def test_parse(self): class ReflogDropTests(TestCase): - def setUp(self): + def setUp(self) -> None: TestCase.setUp(self) self.f = BytesIO(_TEST_REFLOG) self.original_log = list(read_reflog(self.f)) @@ -112,10 +112,10 @@ def _read_log(self): self.f.seek(0) return list(read_reflog(self.f)) - def test_invalid(self): + def test_invalid(self) -> None: self.assertRaises(ValueError, drop_reflog_entry, self.f, -1) - def test_drop_entry(self): + def test_drop_entry(self) -> None: drop_reflog_entry(self.f, 0) log = self._read_log() self.assertEqual(len(log), 
2) @@ -127,7 +127,7 @@ def test_drop_entry(self): self.assertEqual(len(log), 1) self.assertEqual(self.original_log[1], log[0]) - def test_drop_entry_with_rewrite(self): + def test_drop_entry_with_rewrite(self) -> None: drop_reflog_entry(self.f, 1, True) log = self._read_log() self.assertEqual(len(log), 2) diff --git a/tests/test_refs.py b/tests/test_refs.py index b287136a8..c30740187 100644 --- a/tests/test_refs.py +++ b/tests/test_refs.py @@ -54,7 +54,7 @@ class CheckRefFormatTests(TestCase): These are the same tests as in the git test suite. """ - def test_valid(self): + def test_valid(self) -> None: self.assertTrue(check_ref_format(b"heads/foo")) self.assertTrue(check_ref_format(b"foo/bar/baz")) self.assertTrue(check_ref_format(b"refs///heads/foo")) @@ -62,7 +62,7 @@ def test_valid(self): self.assertTrue(check_ref_format(b"heads/foo@bar")) self.assertTrue(check_ref_format(b"heads/fix.lock.error")) - def test_invalid(self): + def test_invalid(self) -> None: self.assertFalse(check_ref_format(b"foo")) self.assertFalse(check_ref_format(b"heads/foo/")) self.assertFalse(check_ref_format(b"./foo")) @@ -81,7 +81,7 @@ def test_invalid(self): class PackedRefsFileTests(TestCase): - def test_split_ref_line_errors(self): + def test_split_ref_line_errors(self) -> None: self.assertRaises(errors.PackedRefsException, _split_ref_line, b"singlefield") self.assertRaises(errors.PackedRefsException, _split_ref_line, b"badsha name") self.assertRaises( @@ -90,17 +90,17 @@ def test_split_ref_line_errors(self): ONES + b" bad/../refname", ) - def test_read_without_peeled(self): + def test_read_without_peeled(self) -> None: f = BytesIO(b"\n".join([b"# comment", ONES + b" ref/1", TWOS + b" ref/2"])) self.assertEqual( [(ONES, b"ref/1"), (TWOS, b"ref/2")], list(read_packed_refs(f)) ) - def test_read_without_peeled_errors(self): + def test_read_without_peeled_errors(self) -> None: f = BytesIO(b"\n".join([ONES + b" ref/1", b"^" + TWOS])) self.assertRaises(errors.PackedRefsException, list, read_packed_refs(f)) - def test_read_with_peeled(self): + def test_read_with_peeled(self) -> None: f = BytesIO( b"\n".join( [ @@ -120,14 +120,14 @@ def test_read_with_peeled(self): list(read_packed_refs_with_peeled(f)), ) - def test_read_with_peeled_errors(self): + def test_read_with_peeled_errors(self) -> None: f = BytesIO(b"\n".join([b"^" + TWOS, ONES + b" ref/1"])) self.assertRaises(errors.PackedRefsException, list, read_packed_refs(f)) f = BytesIO(b"\n".join([ONES + b" ref/1", b"^" + TWOS, b"^" + THREES])) self.assertRaises(errors.PackedRefsException, list, read_packed_refs(f)) - def test_write_with_peeled(self): + def test_write_with_peeled(self) -> None: f = BytesIO() write_packed_refs(f, {b"ref/1": ONES, b"ref/2": TWOS}, {b"ref/1": THREES}) self.assertEqual( @@ -143,7 +143,7 @@ def test_write_with_peeled(self): f.getvalue(), ) - def test_write_without_peeled(self): + def test_write_without_peeled(self) -> None: f = BytesIO() write_packed_refs(f, {b"ref/1": ONES, b"ref/2": TWOS}) self.assertEqual( @@ -165,7 +165,7 @@ def test_write_without_peeled(self): class RefsContainerTests: - def test_keys(self): + def test_keys(self) -> None: actual_keys = set(self._refs.keys()) self.assertEqual(set(self._refs.allkeys()), actual_keys) self.assertEqual(set(_TEST_REFS.keys()), actual_keys) @@ -180,18 +180,18 @@ def test_keys(self): [b"refs-0.1", b"refs-0.2"], sorted(self._refs.keys(b"refs/tags")) ) - def test_iter(self): + def test_iter(self) -> None: actual_keys = set(self._refs.keys()) self.assertEqual(set(self._refs), actual_keys) 
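[Editor's aside, not part of the patch] An illustrative packed-refs payload in the format the tests above parse: comment lines, "<sha> <refname>" entries, and an optional "^<sha>" line that peels the annotated-tag entry directly above it. The import path is an assumption based on how these tests call read_packed_refs_with_peeled; treat this as a sketch, not a reference.

from io import BytesIO

from dulwich.refs import read_packed_refs_with_peeled

ONES = b"1" * 40
TWOS = b"2" * 40
THREES = b"3" * 40

f = BytesIO(
    b"# pack-refs with: peeled\n"
    + ONES + b" ref/1\n"
    + TWOS + b" ref/2\n"
    + b"^" + THREES + b"\n"  # peeled object for the ref/2 entry above
)
# Yields one entry per ref; the caret line attaches THREES to ref/2.
print(list(read_packed_refs_with_peeled(f)))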
self.assertEqual(set(_TEST_REFS.keys()), actual_keys) - def test_as_dict(self): + def test_as_dict(self) -> None: # refs/heads/loop does not show up even if it exists expected_refs = dict(_TEST_REFS) del expected_refs[b"refs/heads/loop"] self.assertEqual(expected_refs, self._refs.as_dict()) - def test_get_symrefs(self): + def test_get_symrefs(self) -> None: self._refs.set_symbolic_ref(b"refs/heads/src", b"refs/heads/dst") symrefs = self._refs.get_symrefs() if b"HEAD" in symrefs: @@ -204,7 +204,7 @@ def test_get_symrefs(self): symrefs, ) - def test_setitem(self): + def test_setitem(self) -> None: self._refs[b"refs/some/ref"] = b"42d06bd4b77fed026b154d16493e5deab78f02ec" self.assertEqual( b"42d06bd4b77fed026b154d16493e5deab78f02ec", @@ -234,7 +234,7 @@ def test_setitem(self): b"42d06bd", ) - def test_set_if_equals(self): + def test_set_if_equals(self) -> None: nines = b"9" * 40 self.assertFalse(self._refs.set_if_equals(b"HEAD", b"c0ffee", nines)) self.assertEqual( @@ -260,7 +260,7 @@ def test_set_if_equals(self): ) self.assertEqual(nines, self._refs[b"refs/heads/nonexistent"]) - def test_add_if_new(self): + def test_add_if_new(self) -> None: nines = b"9" * 40 self.assertFalse(self._refs.add_if_new(b"refs/heads/master", nines)) self.assertEqual( @@ -271,7 +271,7 @@ def test_add_if_new(self): self.assertTrue(self._refs.add_if_new(b"refs/some/ref", nines)) self.assertEqual(nines, self._refs[b"refs/some/ref"]) - def test_set_symbolic_ref(self): + def test_set_symbolic_ref(self) -> None: self._refs.set_symbolic_ref(b"refs/heads/symbolic", b"refs/heads/master") self.assertEqual( b"ref: refs/heads/master", @@ -282,7 +282,7 @@ def test_set_symbolic_ref(self): self._refs[b"refs/heads/symbolic"], ) - def test_set_symbolic_ref_overwrite(self): + def test_set_symbolic_ref_overwrite(self) -> None: nines = b"9" * 40 self.assertNotIn(b"refs/heads/symbolic", self._refs) self._refs[b"refs/heads/symbolic"] = nines @@ -297,7 +297,7 @@ def test_set_symbolic_ref_overwrite(self): self._refs[b"refs/heads/symbolic"], ) - def test_check_refname(self): + def test_check_refname(self) -> None: self._refs._check_refname(b"HEAD") self._refs._check_refname(b"refs/stash") self._refs._check_refname(b"refs/heads/foo") @@ -307,11 +307,11 @@ def test_check_refname(self): errors.RefFormatError, self._refs._check_refname, b"notrefs/foo" ) - def test_contains(self): + def test_contains(self) -> None: self.assertIn(b"refs/heads/master", self._refs) self.assertNotIn(b"refs/heads/bar", self._refs) - def test_delitem(self): + def test_delitem(self) -> None: self.assertEqual( b"42d06bd4b77fed026b154d16493e5deab78f02ec", self._refs[b"refs/heads/master"], @@ -319,7 +319,7 @@ def test_delitem(self): del self._refs[b"refs/heads/master"] self.assertRaises(KeyError, lambda: self._refs[b"refs/heads/master"]) - def test_remove_if_equals(self): + def test_remove_if_equals(self) -> None: self.assertFalse(self._refs.remove_if_equals(b"HEAD", b"c0ffee")) self.assertEqual( b"42d06bd4b77fed026b154d16493e5deab78f02ec", self._refs[b"HEAD"] @@ -333,7 +333,7 @@ def test_remove_if_equals(self): self.assertTrue(self._refs.remove_if_equals(b"refs/tags/refs-0.2", ZERO_SHA)) self.assertNotIn(b"refs/tags/refs-0.2", self._refs) - def test_import_refs_name(self): + def test_import_refs_name(self) -> None: self._refs[b"refs/remotes/origin/other"] = ( b"48d01bd4b77fed026b154d16493e5deab78f02ec" ) @@ -350,7 +350,7 @@ def test_import_refs_name(self): self._refs[b"refs/remotes/origin/other"], ) - def test_import_refs_name_prune(self): + def 
test_import_refs_name_prune(self) -> None: self._refs[b"refs/remotes/origin/other"] = ( b"48d01bd4b77fed026b154d16493e5deab78f02ec" ) @@ -367,11 +367,11 @@ def test_import_refs_name_prune(self): class DictRefsContainerTests(RefsContainerTests, TestCase): - def setUp(self): + def setUp(self) -> None: TestCase.setUp(self) self._refs = DictRefsContainer(dict(_TEST_REFS)) - def test_invalid_refname(self): + def test_invalid_refname(self) -> None: # FIXME: Move this test into RefsContainerTests, but requires # some way of injecting invalid refs. self._refs._refs[b"refs/stash"] = b"00" * 20 @@ -382,13 +382,13 @@ def test_invalid_refname(self): class DiskRefsContainerTests(RefsContainerTests, TestCase): - def setUp(self): + def setUp(self) -> None: TestCase.setUp(self) self._repo = open_repo("refs.git") self.addCleanup(tear_down_repo, self._repo) self._refs = self._repo.refs - def test_get_packed_refs(self): + def test_get_packed_refs(self) -> None: self.assertEqual( { b"refs/heads/packed": b"42d06bd4b77fed026b154d16493e5deab78f02ec", @@ -397,7 +397,7 @@ def test_get_packed_refs(self): self._refs.get_packed_refs(), ) - def test_get_peeled_not_packed(self): + def test_get_peeled_not_packed(self) -> None: # not packed self.assertEqual(None, self._refs.get_peeled(b"refs/tags/refs-0.2")) self.assertEqual( @@ -417,7 +417,7 @@ def test_get_peeled_not_packed(self): self._refs.get_peeled(b"refs/tags/refs-0.1"), ) - def test_setitem(self): + def test_setitem(self) -> None: RefsContainerTests.test_setitem(self) path = os.path.join(self._refs.path, b"refs", b"some", b"ref") with open(path, "rb") as f: @@ -430,14 +430,14 @@ def test_setitem(self): b"42d06bd4b77fed026b154d16493e5deab78f02ec", ) - def test_delete_refs_container(self): + def test_delete_refs_container(self) -> None: # We shouldn't delete the refs directory self._refs[b"refs/heads/blah"] = b"42d06bd4b77fed026b154d16493e5deab78f02ec" for ref in self._refs.allkeys(): del self._refs[ref] self.assertTrue(os.path.exists(os.path.join(self._refs.path, b"refs"))) - def test_setitem_packed(self): + def test_setitem_packed(self) -> None: with open(os.path.join(self._refs.path, b"packed-refs"), "w") as f: f.write("# pack-refs with: peeled fully-peeled sorted \n") f.write("42d06bd4b77fed026b154d16493e5deab78f02ec refs/heads/packed\n") @@ -462,7 +462,7 @@ def test_setitem_packed(self): self._refs.get_packed_refs(), ) - def test_add_packed_refs(self): + def test_add_packed_refs(self) -> None: # first, create a non-packed ref self._refs[b"refs/heads/packed"] = b"3ec9c43c84ff242e3ef4a9fc5bc111fd780a76a8" @@ -516,7 +516,7 @@ def test_add_packed_refs(self): self.assertFalse(os.path.exists(packed_refs_file_path)) - def test_setitem_symbolic(self): + def test_setitem_symbolic(self) -> None: ones = b"1" * 40 self._refs[b"HEAD"] = ones self.assertEqual(ones, self._refs[b"HEAD"]) @@ -532,7 +532,7 @@ def test_setitem_symbolic(self): self.assertEqual(ones, f.read()[:40]) f.close() - def test_set_if_equals(self): + def test_set_if_equals(self) -> None: RefsContainerTests.test_set_if_equals(self) # ensure symref was followed @@ -546,7 +546,7 @@ def test_set_if_equals(self): ) self.assertFalse(os.path.exists(os.path.join(self._refs.path, b"HEAD.lock"))) - def test_add_if_new_packed(self): + def test_add_if_new_packed(self) -> None: # don't overwrite packed ref self.assertFalse(self._refs.add_if_new(b"refs/tags/refs-0.1", b"9" * 40)) self.assertEqual( @@ -554,7 +554,7 @@ def test_add_if_new_packed(self): self._refs[b"refs/tags/refs-0.1"], ) - def 
test_add_if_new_symbolic(self): + def test_add_if_new_symbolic(self) -> None: # Use an empty repo instead of the default. repo_dir = os.path.join(tempfile.mkdtemp(), "test") os.makedirs(repo_dir) @@ -573,7 +573,7 @@ def test_add_if_new_symbolic(self): self.assertEqual(nines, refs[b"HEAD"]) self.assertEqual(nines, refs[b"refs/heads/master"]) - def test_follow(self): + def test_follow(self) -> None: self.assertEqual( ( [b"HEAD", b"refs/heads/master"], @@ -590,7 +590,7 @@ def test_follow(self): ) self.assertRaises(SymrefLoop, self._refs.follow, b"refs/heads/loop") - def test_set_overwrite_loop(self): + def test_set_overwrite_loop(self) -> None: self.assertRaises(SymrefLoop, self._refs.follow, b"refs/heads/loop") self._refs[b"refs/heads/loop"] = b"42d06bd4b77fed026b154d16493e5deab78f02ec" self.assertEqual( @@ -598,13 +598,13 @@ def test_set_overwrite_loop(self): self._refs.follow(b"refs/heads/loop"), ) - def test_delitem(self): + def test_delitem(self) -> None: RefsContainerTests.test_delitem(self) ref_file = os.path.join(self._refs.path, b"refs", b"heads", b"master") self.assertFalse(os.path.exists(ref_file)) self.assertNotIn(b"refs/heads/master", self._refs.get_packed_refs()) - def test_delitem_symbolic(self): + def test_delitem_symbolic(self) -> None: self.assertEqual(b"ref: refs/heads/master", self._refs.read_loose_ref(b"HEAD")) del self._refs[b"HEAD"] self.assertRaises(KeyError, lambda: self._refs[b"HEAD"]) @@ -614,7 +614,7 @@ def test_delitem_symbolic(self): ) self.assertFalse(os.path.exists(os.path.join(self._refs.path, b"HEAD"))) - def test_remove_if_equals_symref(self): + def test_remove_if_equals_symref(self) -> None: # HEAD is a symref, so shouldn't equal its dereferenced value self.assertFalse( self._refs.remove_if_equals( @@ -640,7 +640,7 @@ def test_remove_if_equals_symref(self): ) self.assertFalse(os.path.exists(os.path.join(self._refs.path, b"HEAD.lock"))) - def test_remove_packed_without_peeled(self): + def test_remove_packed_without_peeled(self) -> None: refs_file = os.path.join(self._repo.path, "packed-refs") f = GitFile(refs_file) refs_data = f.read() @@ -663,7 +663,7 @@ def test_remove_packed_without_peeled(self): ) ) - def test_remove_if_equals_packed(self): + def test_remove_if_equals_packed(self) -> None: # test removing ref that is only packed self.assertEqual( b"df6800012397fb85c56e7418dd4eb9405dee075c", @@ -677,7 +677,7 @@ def test_remove_if_equals_packed(self): ) self.assertRaises(KeyError, lambda: self._refs[b"refs/tags/refs-0.1"]) - def test_remove_parent(self): + def test_remove_parent(self) -> None: self._refs[b"refs/heads/foo/bar"] = b"df6800012397fb85c56e7418dd4eb9405dee075c" del self._refs[b"refs/heads/foo/bar"] ref_file = os.path.join( @@ -694,7 +694,7 @@ def test_remove_parent(self): self.assertTrue(os.path.exists(ref_file)) self._refs[b"refs/heads/foo"] = b"df6800012397fb85c56e7418dd4eb9405dee075c" - def test_read_ref(self): + def test_read_ref(self) -> None: self.assertEqual(b"ref: refs/heads/master", self._refs.read_ref(b"HEAD")) self.assertEqual( b"42d06bd4b77fed026b154d16493e5deab78f02ec", @@ -702,12 +702,12 @@ def test_read_ref(self): ) self.assertEqual(None, self._refs.read_ref(b"nonexistent")) - def test_read_loose_ref(self): + def test_read_loose_ref(self) -> None: self._refs[b"refs/heads/foo"] = b"df6800012397fb85c56e7418dd4eb9405dee075c" self.assertEqual(None, self._refs.read_ref(b"refs/heads/foo/bar")) - def test_non_ascii(self): + def test_non_ascii(self) -> None: try: encoded_ref = os.fsencode("refs/tags/schön") except UnicodeEncodeError as 
exc: @@ -724,7 +724,7 @@ def test_non_ascii(self): self.assertEqual(expected_refs, self._repo.get_refs()) - def test_cyrillic(self): + def test_cyrillic(self) -> None: if sys.platform in ("darwin", "win32"): raise SkipTest("filesystem encoding doesn't support arbitrary bytes") # reported in https://github.com/dulwich/dulwich/issues/608 @@ -757,7 +757,7 @@ def test_cyrillic(self): class InfoRefsContainerTests(TestCase): - def test_invalid_refname(self): + def test_invalid_refname(self) -> None: text = _TEST_REFS_SERIALIZED + b"00" * 20 + b"\trefs/stash\n" refs = InfoRefsContainer(BytesIO(text)) expected_refs = dict(_TEST_REFS) @@ -766,7 +766,7 @@ def test_invalid_refname(self): del expected_refs[b"refs/heads/loop"] self.assertEqual(expected_refs, refs.as_dict()) - def test_keys(self): + def test_keys(self) -> None: refs = InfoRefsContainer(BytesIO(_TEST_REFS_SERIALIZED)) actual_keys = set(refs.keys()) self.assertEqual(set(refs.allkeys()), actual_keys) @@ -783,7 +783,7 @@ def test_keys(self): ) self.assertEqual([b"refs-0.1", b"refs-0.2"], sorted(refs.keys(b"refs/tags"))) - def test_as_dict(self): + def test_as_dict(self) -> None: refs = InfoRefsContainer(BytesIO(_TEST_REFS_SERIALIZED)) # refs/heads/loop does not show up even if it exists expected_refs = dict(_TEST_REFS) @@ -791,12 +791,12 @@ def test_as_dict(self): del expected_refs[b"refs/heads/loop"] self.assertEqual(expected_refs, refs.as_dict()) - def test_contains(self): + def test_contains(self) -> None: refs = InfoRefsContainer(BytesIO(_TEST_REFS_SERIALIZED)) self.assertIn(b"refs/heads/master", refs) self.assertNotIn(b"refs/heads/bar", refs) - def test_get_peeled(self): + def test_get_peeled(self) -> None: refs = InfoRefsContainer(BytesIO(_TEST_REFS_SERIALIZED)) # refs/heads/loop does not show up even if it exists self.assertEqual( @@ -806,10 +806,10 @@ def test_get_peeled(self): class ParseSymrefValueTests(TestCase): - def test_valid(self): + def test_valid(self) -> None: self.assertEqual(b"refs/heads/foo", parse_symref_value(b"ref: refs/heads/foo")) - def test_invalid(self): + def test_invalid(self) -> None: self.assertRaises(ValueError, parse_symref_value, b"foobar") @@ -829,11 +829,11 @@ class StripPeeledRefsTests(TestCase): b"refs/tags/2.0.0": b"0749936d0956c661ac8f8d3483774509c165f89e", } - def test_strip_peeled_refs(self): + def test_strip_peeled_refs(self) -> None: # Simple check of two dicts self.assertEqual(strip_peeled_refs(self.all_refs), self.non_peeled_refs) - def test_split_peeled_refs(self): + def test_split_peeled_refs(self) -> None: (regular, peeled) = split_peeled_refs(self.all_refs) self.assertEqual(regular, self.non_peeled_refs) self.assertEqual( diff --git a/tests/test_repository.py b/tests/test_repository.py index d28f17421..d2c520773 100644 --- a/tests/test_repository.py +++ b/tests/test_repository.py @@ -49,7 +49,7 @@ class CreateRepositoryTests(TestCase): - def assertFileContentsEqual(self, expected, repo, path): + def assertFileContentsEqual(self, expected, repo, path) -> None: f = repo.get_named_file(path) if not f: self.assertEqual(expected, None) @@ -57,7 +57,7 @@ def assertFileContentsEqual(self, expected, repo, path): with f: self.assertEqual(expected, f.read()) - def _check_repo_contents(self, repo, expect_bare): + def _check_repo_contents(self, repo, expect_bare) -> None: self.assertEqual(expect_bare, repo.bare) self.assertFileContentsEqual(b"Unnamed repository", repo, "description") self.assertFileContentsEqual(b"", repo, os.path.join("info", "exclude")) @@ -87,25 +87,25 @@ def 
_check_repo_contents(self, repo, expect_bare): self.assertEqual(expected, actual) - def test_create_memory(self): + def test_create_memory(self) -> None: repo = MemoryRepo.init_bare([], {}) self._check_repo_contents(repo, True) - def test_create_disk_bare(self): + def test_create_disk_bare(self) -> None: tmp_dir = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, tmp_dir) repo = Repo.init_bare(tmp_dir) self.assertEqual(tmp_dir, repo._controldir) self._check_repo_contents(repo, True) - def test_create_disk_non_bare(self): + def test_create_disk_non_bare(self) -> None: tmp_dir = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, tmp_dir) repo = Repo.init(tmp_dir) self.assertEqual(os.path.join(tmp_dir, ".git"), repo._controldir) self._check_repo_contents(repo, False) - def test_create_disk_non_bare_mkdir(self): + def test_create_disk_non_bare_mkdir(self) -> None: tmp_dir = tempfile.mkdtemp() target_dir = os.path.join(tmp_dir, "target") self.addCleanup(shutil.rmtree, tmp_dir) @@ -113,7 +113,7 @@ def test_create_disk_non_bare_mkdir(self): self.assertEqual(os.path.join(target_dir, ".git"), repo._controldir) self._check_repo_contents(repo, False) - def test_create_disk_bare_mkdir(self): + def test_create_disk_bare_mkdir(self) -> None: tmp_dir = tempfile.mkdtemp() target_dir = os.path.join(tmp_dir, "target") self.addCleanup(shutil.rmtree, tmp_dir) @@ -123,13 +123,13 @@ def test_create_disk_bare_mkdir(self): class MemoryRepoTests(TestCase): - def test_set_description(self): + def test_set_description(self) -> None: r = MemoryRepo.init_bare([], {}) description = b"Some description" r.set_description(description) self.assertEqual(description, r.get_description()) - def test_pull_into(self): + def test_pull_into(self) -> None: r = MemoryRepo.init_bare([], {}) repo = open_repo("a.git") self.addCleanup(tear_down_repo, repo) @@ -146,18 +146,18 @@ def open_repo(self, name): self.addCleanup(tear_down_repo, repo) return repo - def test_simple_props(self): + def test_simple_props(self) -> None: r = self.open_repo("a.git") self.assertEqual(r.controldir(), r.path) - def test_setitem(self): + def test_setitem(self) -> None: r = self.open_repo("a.git") r[b"refs/tags/foo"] = b"a90fa2d900a17e99b433217e988c4eb4a2e9a097" self.assertEqual( b"a90fa2d900a17e99b433217e988c4eb4a2e9a097", r[b"refs/tags/foo"].id ) - def test_getitem_unicode(self): + def test_getitem_unicode(self) -> None: r = self.open_repo("a.git") test_keys = [ @@ -182,7 +182,7 @@ def test_getitem_unicode(self): 12, ) - def test_delitem(self): + def test_delitem(self) -> None: r = self.open_repo("a.git") del r[b"refs/heads/master"] @@ -193,7 +193,7 @@ def test_delitem(self): self.assertRaises(ValueError, r.__delitem__, b"notrefs/foo") - def test_get_refs(self): + def test_get_refs(self) -> None: r = self.open_repo("a.git") self.assertEqual( { @@ -205,49 +205,49 @@ def test_get_refs(self): r.get_refs(), ) - def test_head(self): + def test_head(self) -> None: r = self.open_repo("a.git") self.assertEqual(r.head(), b"a90fa2d900a17e99b433217e988c4eb4a2e9a097") - def test_get_object(self): + def test_get_object(self) -> None: r = self.open_repo("a.git") obj = r.get_object(r.head()) self.assertEqual(obj.type_name, b"commit") - def test_get_object_non_existant(self): + def test_get_object_non_existant(self) -> None: r = self.open_repo("a.git") self.assertRaises(KeyError, r.get_object, missing_sha) - def test_contains_object(self): + def test_contains_object(self) -> None: r = self.open_repo("a.git") self.assertIn(r.head(), r) self.assertNotIn(b"z" * 40, r) - def 
test_contains_ref(self): + def test_contains_ref(self) -> None: r = self.open_repo("a.git") self.assertIn(b"HEAD", r) - def test_get_no_description(self): + def test_get_no_description(self) -> None: r = self.open_repo("a.git") self.assertIs(None, r.get_description()) - def test_get_description(self): + def test_get_description(self) -> None: r = self.open_repo("a.git") with open(os.path.join(r.path, "description"), "wb") as f: f.write(b"Some description") self.assertEqual(b"Some description", r.get_description()) - def test_set_description(self): + def test_set_description(self) -> None: r = self.open_repo("a.git") description = b"Some description" r.set_description(description) self.assertEqual(description, r.get_description()) - def test_contains_missing(self): + def test_contains_missing(self) -> None: r = self.open_repo("a.git") self.assertNotIn(b"bar", r) - def test_get_peeled(self): + def test_get_peeled(self) -> None: # unpacked ref r = self.open_repo("a.git") tag_sha = b"28237f4dc30d0d462658d6b937b08a0f0b6ef55a" @@ -262,11 +262,11 @@ def test_get_peeled(self): # TODO: add more corner cases to test repo - def test_get_peeled_not_tag(self): + def test_get_peeled_not_tag(self) -> None: r = self.open_repo("a.git") self.assertEqual(r.get_peeled(b"HEAD"), r.head()) - def test_get_parents(self): + def test_get_parents(self) -> None: r = self.open_repo("a.git") self.assertEqual( [b"2a72d929692c41d8554c07f6301757ba18a65d91"], @@ -275,7 +275,7 @@ def test_get_parents(self): r.update_shallow([b"a90fa2d900a17e99b433217e988c4eb4a2e9a097"], None) self.assertEqual([], r.get_parents(b"a90fa2d900a17e99b433217e988c4eb4a2e9a097")) - def test_get_walker(self): + def test_get_walker(self) -> None: r = self.open_repo("a.git") # include defaults to [r.head()] self.assertEqual( @@ -297,7 +297,7 @@ def test_get_walker(self): [b"2a72d929692c41d8554c07f6301757ba18a65d91"], ) - def assertFilesystemHidden(self, path): + def assertFilesystemHidden(self, path) -> None: if sys.platform != "win32": return import ctypes @@ -308,7 +308,7 @@ def assertFilesystemHidden(self, path): ) self.assertTrue(2 & GetFileAttributesW(path)) - def test_init_existing(self): + def test_init_existing(self) -> None: tmp_dir = self.mkdtemp() self.addCleanup(shutil.rmtree, tmp_dir) t = Repo.init(tmp_dir) @@ -316,7 +316,7 @@ def test_init_existing(self): self.assertEqual(os.listdir(tmp_dir), [".git"]) self.assertFilesystemHidden(os.path.join(tmp_dir, ".git")) - def test_init_mkdir(self): + def test_init_mkdir(self) -> None: tmp_dir = self.mkdtemp() self.addCleanup(shutil.rmtree, tmp_dir) repo_dir = os.path.join(tmp_dir, "a-repo") @@ -326,7 +326,7 @@ def test_init_mkdir(self): self.assertEqual(os.listdir(repo_dir), [".git"]) self.assertFilesystemHidden(os.path.join(repo_dir, ".git")) - def test_init_mkdir_unicode(self): + def test_init_mkdir_unicode(self) -> None: repo_name = "\xa7" try: os.fsencode(repo_name) @@ -342,7 +342,7 @@ def test_init_mkdir_unicode(self): self.assertFilesystemHidden(os.path.join(repo_dir, ".git")) @skipIf(sys.platform == "win32", "fails on Windows") - def test_fetch(self): + def test_fetch(self) -> None: r = self.open_repo("a.git") tmp_dir = self.mkdtemp() self.addCleanup(shutil.rmtree, tmp_dir) @@ -356,7 +356,7 @@ def test_fetch(self): self.assertIn(b"b0931cadc54336e78a1d980420e3268903b57a50", t) @skipIf(sys.platform == "win32", "fails on Windows") - def test_fetch_ignores_missing_refs(self): + def test_fetch_ignores_missing_refs(self) -> None: r = self.open_repo("a.git") missing = 
b"1234566789123456789123567891234657373833" r.refs[b"refs/heads/blah"] = missing @@ -372,7 +372,7 @@ def test_fetch_ignores_missing_refs(self): self.assertIn(b"b0931cadc54336e78a1d980420e3268903b57a50", t) self.assertNotIn(missing, t) - def test_clone(self): + def test_clone(self) -> None: r = self.open_repo("a.git") tmp_dir = self.mkdtemp() self.addCleanup(shutil.rmtree, tmp_dir) @@ -402,7 +402,7 @@ def test_clone(self): c.get((b"remote", b"origin"), b"fetch"), ) - def test_clone_no_head(self): + def test_clone_no_head(self) -> None: temp_dir = self.mkdtemp() self.addCleanup(shutil.rmtree, temp_dir) repo_dir = os.path.join(os.path.dirname(__file__), "..", "testdata", "repos") @@ -422,7 +422,7 @@ def test_clone_no_head(self): t.refs.as_dict(), ) - def test_clone_empty(self): + def test_clone_empty(self) -> None: """Test clone() doesn't crash if HEAD points to a non-existing ref. This simulates cloning server-side bare repository either when it is @@ -435,7 +435,7 @@ def test_clone_empty(self): self.addCleanup(shutil.rmtree, tmp_dir) r.clone(tmp_dir, mkdir=False, bare=True) - def test_reset_index_symlink_enabled(self): + def test_reset_index_symlink_enabled(self) -> None: if sys.platform == "win32": self.skipTest("symlinks are not supported on Windows") tmp_dir = self.mkdtemp() @@ -456,7 +456,7 @@ def test_reset_index_symlink_enabled(self): self.assertEqual("foo", os.readlink(bar_path)) t.close() - def test_reset_index_symlink_disabled(self): + def test_reset_index_symlink_disabled(self) -> None: tmp_dir = self.mkdtemp() self.addCleanup(shutil.rmtree, tmp_dir) @@ -472,14 +472,14 @@ def test_reset_index_symlink_disabled(self): t.close() - def test_clone_bare(self): + def test_clone_bare(self) -> None: r = self.open_repo("a.git") tmp_dir = self.mkdtemp() self.addCleanup(shutil.rmtree, tmp_dir) t = r.clone(tmp_dir, mkdir=False) t.close() - def test_clone_checkout_and_bare(self): + def test_clone_checkout_and_bare(self) -> None: r = self.open_repo("a.git") tmp_dir = self.mkdtemp() self.addCleanup(shutil.rmtree, tmp_dir) @@ -487,7 +487,7 @@ def test_clone_checkout_and_bare(self): ValueError, r.clone, tmp_dir, mkdir=False, checkout=True, bare=True ) - def test_clone_branch(self): + def test_clone_branch(self) -> None: r = self.open_repo("a.git") r.refs[b"refs/heads/mybranch"] = b"28237f4dc30d0d462658d6b937b08a0f0b6ef55a" tmp_dir = self.mkdtemp() @@ -502,7 +502,7 @@ def test_clone_branch(self): b"a90fa2d900a17e99b433217e988c4eb4a2e9a097", ) - def test_clone_tag(self): + def test_clone_tag(self) -> None: r = self.open_repo("a.git") tmp_dir = self.mkdtemp() self.addCleanup(shutil.rmtree, tmp_dir) @@ -517,7 +517,7 @@ def test_clone_tag(self): b"a90fa2d900a17e99b433217e988c4eb4a2e9a097", ) - def test_clone_invalid_branch(self): + def test_clone_invalid_branch(self) -> None: r = self.open_repo("a.git") tmp_dir = self.mkdtemp() self.addCleanup(shutil.rmtree, tmp_dir) @@ -529,7 +529,7 @@ def test_clone_invalid_branch(self): branch=b"mybranch", ) - def test_merge_history(self): + def test_merge_history(self) -> None: r = self.open_repo("simple_merge.git") shas = [e.commit.id for e in r.get_walker()] self.assertEqual( @@ -543,7 +543,7 @@ def test_merge_history(self): ], ) - def test_out_of_order_merge(self): + def test_out_of_order_merge(self) -> None: """Test that revision history is ordered by date, not parent order.""" r = self.open_repo("ooo_merge.git") shas = [e.commit.id for e in r.get_walker()] @@ -557,19 +557,19 @@ def test_out_of_order_merge(self): ], ) - def test_get_tags_empty(self): + def 
test_get_tags_empty(self) -> None: r = self.open_repo("ooo_merge.git") self.assertEqual({}, r.refs.as_dict(b"refs/tags")) - def test_get_config(self): + def test_get_config(self) -> None: r = self.open_repo("ooo_merge.git") self.assertIsInstance(r.get_config(), Config) - def test_get_config_stack(self): + def test_get_config_stack(self) -> None: r = self.open_repo("ooo_merge.git") self.assertIsInstance(r.get_config_stack(), Config) - def test_common_revisions(self): + def test_common_revisions(self) -> None: """This test demonstrates that ``find_common_revisions()`` actually returns common heads, not revisions; dulwich already uses ``find_common_revisions()`` in such a manner (see @@ -618,7 +618,7 @@ def test_common_revisions(self): shas = r1.object_store.find_common_revisions(r2.get_graph_walker()) self.assertEqual(set(shas), expected_shas) - def test_shell_hook_pre_commit(self): + def test_shell_hook_pre_commit(self) -> None: if os.name != "posix": self.skipTest("shell hook tests requires POSIX shell") @@ -668,7 +668,7 @@ def test_shell_hook_pre_commit(self): ) self.assertEqual([], r[commit_sha].parents) - def test_shell_hook_commit_msg(self): + def test_shell_hook_commit_msg(self) -> None: if os.name != "posix": self.skipTest("shell hook tests requires POSIX shell") @@ -718,7 +718,7 @@ def test_shell_hook_commit_msg(self): ) self.assertEqual([], r[commit_sha].parents) - def test_shell_hook_pre_commit_add_files(self): + def test_shell_hook_pre_commit_add_files(self) -> None: if os.name != "posix": self.skipTest("shell hook tests requires POSIX shell") @@ -767,7 +767,7 @@ def test_shell_hook_pre_commit_add_files(self): tree = r[r[commit_sha].tree] self.assertEqual({b"blah", b"foo"}, set(tree)) - def test_shell_hook_post_commit(self): + def test_shell_hook_post_commit(self) -> None: if os.name != "posix": self.skipTest("shell hook tests requires POSIX shell") @@ -851,8 +851,8 @@ def test_shell_hook_post_commit(self): ) self.assertEqual([commit_sha], r[commit_sha2].parents) - def test_as_dict(self): - def check(repo): + def test_as_dict(self) -> None: + def check(repo) -> None: self.assertEqual( repo.refs.subkeys(b"refs/tags"), repo.refs.subkeys(b"refs/tags/"), @@ -873,7 +873,7 @@ def check(repo): check(nonbare) check(bare) - def test_working_tree(self): + def test_working_tree(self) -> None: temp_dir = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, temp_dir) worktree_temp_dir = tempfile.mkdtemp() @@ -919,7 +919,7 @@ class BuildRepoRootTests(TestCase): def get_repo_dir(self): return os.path.join(tempfile.mkdtemp(), "test") - def setUp(self): + def setUp(self) -> None: super().setUp() self._repo_dir = self.get_repo_dir() os.makedirs(self._repo_dir) @@ -944,7 +944,7 @@ def setUp(self): self.assertEqual([], r[commit_sha].parents) self._root_commit = commit_sha - def test_get_shallow(self): + def test_get_shallow(self) -> None: self.assertEqual(set(), self._repo.get_shallow()) with open(os.path.join(self._repo.path, ".git", "shallow"), "wb") as f: f.write(b"a90fa2d900a17e99b433217e988c4eb4a2e9a097\n") @@ -953,7 +953,7 @@ def test_get_shallow(self): self._repo.get_shallow(), ) - def test_update_shallow(self): + def test_update_shallow(self) -> None: self._repo.update_shallow(None, None) # no op self.assertEqual(set(), self._repo.get_shallow()) self._repo.update_shallow([b"a90fa2d900a17e99b433217e988c4eb4a2e9a097"], None) @@ -976,7 +976,7 @@ def test_update_shallow(self): os.path.exists(os.path.join(self._repo.controldir(), "shallow")), ) - def test_build_repo(self): + def 
test_build_repo(self) -> None: r = self._repo self.assertEqual(b"ref: refs/heads/master", r.refs.read_ref(b"HEAD")) self.assertEqual(self._root_commit, r.refs[b"refs/heads/master"]) @@ -985,7 +985,7 @@ def test_build_repo(self): actual_commit = r[self._root_commit] self.assertEqual(b"msg", actual_commit.message) - def test_commit_modified(self): + def test_commit_modified(self) -> None: r = self._repo with open(os.path.join(r.path, "a"), "wb") as f: f.write(b"new contents") @@ -1005,7 +1005,7 @@ def test_commit_modified(self): self.assertEqual(b"new contents", r[a_id].data) @skipIf(not getattr(os, "symlink", None), "Requires symlink support") - def test_commit_symlink(self): + def test_commit_symlink(self) -> None: r = self._repo os.symlink("a", os.path.join(r.path, "b")) r.stage(["a", "b"]) @@ -1023,7 +1023,7 @@ def test_commit_symlink(self): self.assertTrue(stat.S_ISLNK(b_mode)) self.assertEqual(b"a", r[b_id].data) - def test_commit_merge_heads_file(self): + def test_commit_merge_heads_file(self) -> None: tmp_dir = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, tmp_dir) r = Repo.init(tmp_dir) @@ -1057,7 +1057,7 @@ def test_commit_merge_heads_file(self): r[commit_sha].parents, ) - def test_commit_deleted(self): + def test_commit_deleted(self) -> None: r = self._repo os.remove(os.path.join(r.path, "a")) r.stage(["a"]) @@ -1075,7 +1075,7 @@ def test_commit_deleted(self): tree = r[r[commit_sha].tree] self.assertEqual([], list(tree.iteritems())) - def test_commit_follows(self): + def test_commit_follows(self) -> None: r = self._repo r.refs.set_symbolic_ref(b"HEAD", b"refs/heads/bla") commit_sha = r.do_commit( @@ -1090,7 +1090,7 @@ def test_commit_follows(self): ) self.assertEqual(commit_sha, r[b"refs/heads/bla"].id) - def test_commit_encoding(self): + def test_commit_encoding(self) -> None: r = self._repo commit_sha = r.do_commit( b"commit with strange character \xee", @@ -1104,7 +1104,7 @@ def test_commit_encoding(self): ) self.assertEqual(b"iso8859-1", r[commit_sha].encoding) - def test_compression_level(self): + def test_compression_level(self) -> None: r = self._repo c = r.get_config() c.set(("core",), "compression", "3") @@ -1113,21 +1113,21 @@ def test_compression_level(self): r = Repo(self._repo_dir) self.assertEqual(r.object_store.loose_compression_level, 4) - def test_repositoryformatversion_unsupported(self): + def test_repositoryformatversion_unsupported(self) -> None: r = self._repo c = r.get_config() c.set(("core",), "repositoryformatversion", "2") c.write_to_path() self.assertRaises(UnsupportedVersion, Repo, self._repo_dir) - def test_repositoryformatversion_1(self): + def test_repositoryformatversion_1(self) -> None: r = self._repo c = r.get_config() c.set(("core",), "repositoryformatversion", "1") c.write_to_path() Repo(self._repo_dir) - def test_worktreeconfig_extension(self): + def test_worktreeconfig_extension(self) -> None: r = self._repo c = r.get_config() c.set(("core",), "repositoryformatversion", "1") @@ -1140,7 +1140,7 @@ def test_worktreeconfig_extension(self): cs = r.get_config_stack() self.assertEqual(cs.get(("user",), "name"), b"Jelmer") - def test_worktreeconfig_extension_case(self): + def test_worktreeconfig_extension_case(self) -> None: """Test that worktree code does not error for alternate case format.""" r = self._repo c = r.get_config() @@ -1156,7 +1156,7 @@ def test_worktreeconfig_extension_case(self): # https://github.com/jelmer/dulwich/issues/1285 was addressed Repo(self._repo_dir) - def test_repositoryformatversion_1_extension(self): + def 
test_repositoryformatversion_1_extension(self) -> None: r = self._repo c = r.get_config() c.set(("core",), "repositoryformatversion", "1") @@ -1164,7 +1164,7 @@ def test_repositoryformatversion_1_extension(self): c.write_to_path() self.assertRaises(UnsupportedExtension, Repo, self._repo_dir) - def test_commit_encoding_from_config(self): + def test_commit_encoding_from_config(self) -> None: r = self._repo c = r.get_config() c.set(("i18n",), "commitEncoding", "iso8859-1") @@ -1180,7 +1180,7 @@ def test_commit_encoding_from_config(self): ) self.assertEqual(b"iso8859-1", r[commit_sha].encoding) - def test_commit_config_identity(self): + def test_commit_config_identity(self) -> None: # commit falls back to the users' identity if it wasn't specified r = self._repo c = r.get_config() @@ -1191,7 +1191,7 @@ def test_commit_config_identity(self): self.assertEqual(b"Jelmer ", r[commit_sha].author) self.assertEqual(b"Jelmer ", r[commit_sha].committer) - def test_commit_config_identity_strips_than(self): + def test_commit_config_identity_strips_than(self) -> None: # commit falls back to the users' identity if it wasn't specified, # and strips superfluous <> r = self._repo @@ -1203,7 +1203,7 @@ def test_commit_config_identity_strips_than(self): self.assertEqual(b"Jelmer ", r[commit_sha].author) self.assertEqual(b"Jelmer ", r[commit_sha].committer) - def test_commit_config_identity_in_memoryrepo(self): + def test_commit_config_identity_in_memoryrepo(self) -> None: # commit falls back to the users' identity if it wasn't specified r = MemoryRepo.init_bare([], {}) c = r.get_config() @@ -1214,7 +1214,7 @@ def test_commit_config_identity_in_memoryrepo(self): self.assertEqual(b"Jelmer ", r[commit_sha].author) self.assertEqual(b"Jelmer ", r[commit_sha].committer) - def test_commit_config_identity_from_env(self): + def test_commit_config_identity_from_env(self) -> None: # commit falls back to the users' identity if it wasn't specified self.overrideEnv("GIT_COMMITTER_NAME", "joe") self.overrideEnv("GIT_COMMITTER_EMAIL", "joe@example.com") @@ -1227,15 +1227,15 @@ def test_commit_config_identity_from_env(self): self.assertEqual(b"Jelmer ", r[commit_sha].author) self.assertEqual(b"joe ", r[commit_sha].committer) - def test_commit_fail_ref(self): + def test_commit_fail_ref(self) -> None: r = self._repo - def set_if_equals(name, old_ref, new_ref, **kwargs): + def set_if_equals(name, old_ref, new_ref, **kwargs) -> bool: return False r.refs.set_if_equals = set_if_equals - def add_if_new(name, new_ref, **kwargs): + def add_if_new(name, new_ref, **kwargs) -> None: self.fail("Unexpected call to add_if_new") r.refs.add_if_new = add_if_new @@ -1259,7 +1259,7 @@ def add_if_new(name, new_ref, **kwargs): self.assertEqual(r[self._root_commit].tree, new_commit.tree) self.assertEqual(b"failed commit", new_commit.message) - def test_commit_branch(self): + def test_commit_branch(self) -> None: r = self._repo commit_sha = r.do_commit( @@ -1293,7 +1293,7 @@ def test_commit_branch(self): self.assertEqual(commit_sha, r[b"refs/heads/new_branch"].id) self.assertEqual([new_branch_head], r[commit_sha].parents) - def test_commit_merge_heads(self): + def test_commit_merge_heads(self) -> None: r = self._repo merge_1 = r.do_commit( b"commit to branch 2", @@ -1317,7 +1317,7 @@ def test_commit_merge_heads(self): ) self.assertEqual([self._root_commit, merge_1], r[commit_sha].parents) - def test_commit_dangling_commit(self): + def test_commit_dangling_commit(self) -> None: r = self._repo old_shas = set(r.object_store) @@ -1341,7 +1341,7 @@ def 
test_commit_dangling_commit(self): self.assertEqual([], r[commit_sha].parents) self.assertEqual(old_refs, r.get_refs()) - def test_commit_dangling_commit_with_parents(self): + def test_commit_dangling_commit_with_parents(self) -> None: r = self._repo old_shas = set(r.object_store) @@ -1366,32 +1366,32 @@ def test_commit_dangling_commit_with_parents(self): self.assertEqual([self._root_commit], r[commit_sha].parents) self.assertEqual(old_refs, r.get_refs()) - def test_stage_absolute(self): + def test_stage_absolute(self) -> None: r = self._repo os.remove(os.path.join(r.path, "a")) self.assertRaises(ValueError, r.stage, [os.path.join(r.path, "a")]) - def test_stage_deleted(self): + def test_stage_deleted(self) -> None: r = self._repo os.remove(os.path.join(r.path, "a")) r.stage(["a"]) r.stage(["a"]) # double-stage a deleted path self.assertEqual([], list(r.open_index())) - def test_stage_directory(self): + def test_stage_directory(self) -> None: r = self._repo os.mkdir(os.path.join(r.path, "c")) r.stage(["c"]) self.assertEqual([b"a"], list(r.open_index())) - def test_stage_submodule(self): + def test_stage_submodule(self) -> None: r = self._repo s = Repo.init(os.path.join(r.path, "sub"), mkdir=True) s.do_commit(b"message") r.stage(["sub"]) self.assertEqual([b"a", b"sub"], list(r.open_index())) - def test_unstage_midify_file_with_dir(self): + def test_unstage_midify_file_with_dir(self) -> None: os.mkdir(os.path.join(self._repo.path, "new_dir")) full_path = os.path.join(self._repo.path, "new_dir", "foo") @@ -1412,7 +1412,7 @@ def test_unstage_midify_file_with_dir(self): [{"add": [], "delete": [], "modify": []}, [b"new_dir/foo"], []], status ) - def test_unstage_while_no_commit(self): + def test_unstage_while_no_commit(self) -> None: file = "foo" full_path = os.path.join(self._repo.path, file) with open(full_path, "w") as f: @@ -1422,7 +1422,7 @@ def test_unstage_while_no_commit(self): status = list(porcelain.status(self._repo)) self.assertEqual([{"add": [], "delete": [], "modify": []}, [], ["foo"]], status) - def test_unstage_add_file(self): + def test_unstage_add_file(self) -> None: file = "foo" full_path = os.path.join(self._repo.path, file) porcelain.commit( @@ -1438,7 +1438,7 @@ def test_unstage_add_file(self): status = list(porcelain.status(self._repo)) self.assertEqual([{"add": [], "delete": [], "modify": []}, [], ["foo"]], status) - def test_unstage_modify_file(self): + def test_unstage_modify_file(self) -> None: file = "foo" full_path = os.path.join(self._repo.path, file) with open(full_path, "w") as f: @@ -1460,7 +1460,7 @@ def test_unstage_modify_file(self): [{"add": [], "delete": [], "modify": []}, [b"foo"], []], status ) - def test_unstage_remove_file(self): + def test_unstage_remove_file(self) -> None: file = "foo" full_path = os.path.join(self._repo.path, file) with open(full_path, "w") as f: @@ -1479,7 +1479,7 @@ def test_unstage_remove_file(self): [{"add": [], "delete": [], "modify": []}, [b"foo"], []], status ) - def test_reset_index(self): + def test_reset_index(self) -> None: r = self._repo with open(os.path.join(r.path, "a"), "wb") as f: f.write(b"changed") @@ -1498,7 +1498,7 @@ def test_reset_index(self): sys.platform in ("win32", "darwin"), "tries to implicitly decode as utf8", ) - def test_commit_no_encode_decode(self): + def test_commit_no_encode_decode(self) -> None: r = self._repo repo_path_bytes = os.fsencode(r.path) encodings = ("utf8", "latin1") @@ -1529,25 +1529,25 @@ def test_commit_no_encode_decode(self): self.assertEqual(stat.S_IFREG | 0o644, mode) 
self.assertEqual(encoding.encode("ascii"), r[id].data) - def test_discover_intended(self): + def test_discover_intended(self) -> None: path = os.path.join(self._repo_dir, "b/c") r = Repo.discover(path) self.assertEqual(r.head(), self._repo.head()) - def test_discover_isrepo(self): + def test_discover_isrepo(self) -> None: r = Repo.discover(self._repo_dir) self.assertEqual(r.head(), self._repo.head()) - def test_discover_notrepo(self): + def test_discover_notrepo(self) -> None: with self.assertRaises(NotGitRepository): Repo.discover("/") class CheckUserIdentityTests(TestCase): - def test_valid(self): + def test_valid(self) -> None: check_user_identity(b"Me <me@example.com>") - def test_invalid(self): + def test_invalid(self) -> None: self.assertRaises(InvalidUserIdentity, check_user_identity, b"No Email") self.assertRaises( InvalidUserIdentity, check_user_identity, b"Fullname None: self._output: list[bytes] = [] self._received: dict[int, list[bytes]] = {0: [], 1: [], 2: [], 3: []} - def set_output(self, output_lines): + def set_output(self, output_lines) -> None: self._output = output_lines def read_pkt_line(self): @@ -83,10 +83,10 @@ def read_pkt_line(self): else: raise HangupException - def write_sideband(self, band, data): + def write_sideband(self, band, data) -> None: self._received[band].append(data) - def write_pkt_line(self, data): + def write_pkt_line(self, data) -> None: self._received[0].append(data) def get_received_line(self, band=0): @@ -108,23 +108,23 @@ def required_capabilities(cls): class HandlerTestCase(TestCase): - def setUp(self): + def setUp(self) -> None: super().setUp() self._handler = TestGenericPackHandler() - def assertSucceeds(self, func, *args, **kwargs): + def assertSucceeds(self, func, *args, **kwargs) -> None: try: func(*args, **kwargs) except GitProtocolError as e: self.fail(e) - def test_capability_line(self): + def test_capability_line(self) -> None: self.assertEqual( b" cap1 cap2 cap3", format_capability_line([b"cap1", b"cap2", b"cap3"]), ) - def test_set_client_capabilities(self): + def test_set_client_capabilities(self) -> None: set_caps = self._handler.set_client_capabilities self.assertSucceeds(set_caps, [b"cap2"]) self.assertSucceeds(set_caps, [b"cap1", b"cap2"]) @@ -142,7 +142,7 @@ def test_set_client_capabilities(self): self._handler.innocuous_capabilities = lambda: (b"ignoreme",) self.assertSucceeds(set_caps, [b"cap2", b"ignoreme"]) - def test_has_capability(self): + def test_has_capability(self) -> None: self.assertRaises(GitProtocolError, self._handler.has_capability, b"cap") caps = self._handler.capabilities() self._handler.set_client_capabilities(caps) @@ -152,7 +152,7 @@ def test_has_capability(self): class UploadPackHandlerTestCase(TestCase): - def setUp(self): + def setUp(self) -> None: super().setUp() self.path = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, self.path) @@ -163,7 +163,7 @@ def setUp(self): backend, [b"/", b"host=lolcathost"], TestProto() ) - def test_progress(self): + def test_progress(self) -> None: caps = self._handler.required_capabilities() self._handler.set_client_capabilities(caps) self._handler._start_pack_send_phase() @@ -173,14 +173,14 @@ def test_progress(self): self.assertEqual(b"second message", self._handler.proto.get_received_line(2)) self.assertRaises(IndexError, self._handler.proto.get_received_line, 2) - def test_no_progress(self): + def test_no_progress(self) -> None: caps = [*list(self._handler.required_capabilities()), b"no-progress"] self._handler.set_client_capabilities(caps) self._handler.progress(b"first
message") self._handler.progress(b"second message") self.assertRaises(IndexError, self._handler.proto.get_received_line, 2) - def test_get_tagged(self): + def test_get_tagged(self) -> None: refs = { b"refs/tags/tag1": ONE, b"refs/tags/tag2": TWO, @@ -209,7 +209,7 @@ def test_get_tagged(self): self._handler.set_client_capabilities(caps) self.assertEqual({}, self._handler.get_tagged(refs, repo=self._repo)) - def test_nothing_to_do_but_wants(self): + def test_nothing_to_do_but_wants(self) -> None: # Just the fact that the client claims to want an object is enough # for sending a pack. Even if there turns out to be nothing. refs = {b"refs/tags/tag1": ONE} @@ -231,7 +231,7 @@ def test_nothing_to_do_but_wants(self): # The server should always send a pack, even if it's empty. self.assertTrue(self._handler.proto.get_received_line(1).startswith(b"PACK")) - def test_nothing_to_do_no_wants(self): + def test_nothing_to_do_no_wants(self) -> None: # Don't send a pack if the client didn't ask for anything. refs = {b"refs/tags/tag1": ONE} tree = Tree() @@ -247,7 +247,7 @@ def test_nothing_to_do_no_wants(self): class FindShallowTests(TestCase): - def setUp(self): + def setUp(self) -> None: super().setUp() self._store = MemoryObjectStore() @@ -264,10 +264,10 @@ def make_linear_commits(self, n, message=b""): parents = [commits[-1].id] return commits - def assertSameElements(self, expected, actual): + def assertSameElements(self, expected, actual) -> None: self.assertEqual(set(expected), set(actual)) - def test_linear(self): + def test_linear(self) -> None: c1, c2, c3 = self.make_linear_commits(3) self.assertEqual(({c3.id}, set()), _find_shallow(self._store, [c3.id], 1)) @@ -284,7 +284,7 @@ def test_linear(self): _find_shallow(self._store, [c3.id], 4), ) - def test_multiple_independent(self): + def test_multiple_independent(self) -> None: a = self.make_linear_commits(2, message=b"a") b = self.make_linear_commits(2, message=b"b") c = self.make_linear_commits(2, message=b"c") @@ -295,7 +295,7 @@ def test_multiple_independent(self): _find_shallow(self._store, heads, 2), ) - def test_multiple_overlapping(self): + def test_multiple_overlapping(self) -> None: # Create the following commit tree: # 1--2 # \ @@ -310,7 +310,7 @@ def test_multiple_overlapping(self): _find_shallow(self._store, [c2.id, c4.id], 3), ) - def test_merge(self): + def test_merge(self) -> None: c1 = self.make_commit() c2 = self.make_commit() c3 = self.make_commit(parents=[c1.id, c2.id]) @@ -320,7 +320,7 @@ def test_merge(self): _find_shallow(self._store, [c3.id], 2), ) - def test_tag(self): + def test_tag(self) -> None: c1, c2 = self.make_linear_commits(2) tag = make_tag(c2, name=b"tag") self._store.add_object(tag) @@ -338,7 +338,7 @@ def required_capabilities(self): class ReceivePackHandlerTestCase(TestCase): - def setUp(self): + def setUp(self) -> None: super().setUp() self._repo = MemoryRepo.init_bare([], {}) backend = DictBackend({b"/": self._repo}) @@ -346,7 +346,7 @@ def setUp(self): backend, [b"/", b"host=lolcathost"], TestProto() ) - def test_apply_pack_del_ref(self): + def test_apply_pack_del_ref(self) -> None: refs = {b"refs/heads/master": TWO, b"refs/heads/fake-branch": ONE} self._repo.refs._update(refs) update_refs = [ @@ -361,7 +361,7 @@ def test_apply_pack_del_ref(self): class ProtocolGraphWalkerEmptyTestCase(TestCase): - def setUp(self): + def setUp(self) -> None: super().setUp() self._repo = MemoryRepo.init_bare([], {}) backend = DictBackend({b"/": self._repo}) @@ -372,7 +372,7 @@ def setUp(self): self._repo.refs.get_symrefs, ) - 
def test_empty_repository(self): + def test_empty_repository(self) -> None: # The server should wait for a flush packet. self._walker.proto.set_output([]) self.assertRaises(HangupException, self._walker.determine_wants, {}) @@ -384,7 +384,7 @@ def test_empty_repository(self): class ProtocolGraphWalkerTestCase(TestCase): - def setUp(self): + def setUp(self) -> None: super().setUp() # Create the following commit tree: # 3---5 @@ -406,7 +406,7 @@ def setUp(self): self._repo.refs.get_symrefs, ) - def test_all_wants_satisfied_no_haves(self): + def test_all_wants_satisfied_no_haves(self) -> None: self._walker.set_wants([ONE]) self.assertFalse(self._walker.all_wants_satisfied([])) self._walker.set_wants([TWO]) @@ -414,7 +414,7 @@ def test_all_wants_satisfied_no_haves(self): self._walker.set_wants([THREE]) self.assertFalse(self._walker.all_wants_satisfied([])) - def test_all_wants_satisfied_have_root(self): + def test_all_wants_satisfied_have_root(self) -> None: self._walker.set_wants([ONE]) self.assertTrue(self._walker.all_wants_satisfied([ONE])) self._walker.set_wants([TWO]) @@ -422,14 +422,14 @@ def test_all_wants_satisfied_have_root(self): self._walker.set_wants([THREE]) self.assertTrue(self._walker.all_wants_satisfied([ONE])) - def test_all_wants_satisfied_have_branch(self): + def test_all_wants_satisfied_have_branch(self) -> None: self._walker.set_wants([TWO]) self.assertTrue(self._walker.all_wants_satisfied([TWO])) # wrong branch self._walker.set_wants([THREE]) self.assertFalse(self._walker.all_wants_satisfied([TWO])) - def test_all_wants_satisfied(self): + def test_all_wants_satisfied(self) -> None: self._walker.set_wants([FOUR, FIVE]) # trivial case: wants == haves self.assertTrue(self._walker.all_wants_satisfied([FOUR, FIVE])) @@ -439,7 +439,7 @@ def test_all_wants_satisfied(self): self.assertFalse(self._walker.all_wants_satisfied([THREE])) self.assertTrue(self._walker.all_wants_satisfied([TWO, THREE])) - def test_split_proto_line(self): + def test_split_proto_line(self) -> None: allowed = (b"want", b"done", None) self.assertEqual( (b"want", ONE), _split_proto_line(b"want " + ONE + b"\n", allowed) @@ -464,7 +464,7 @@ def test_split_proto_line(self): self.assertEqual((b"done", None), _split_proto_line(b"done\n", allowed)) self.assertEqual((None, None), _split_proto_line(b"", allowed)) - def test_determine_wants(self): + def test_determine_wants(self) -> None: self._walker.proto.set_output([None]) self.assertEqual([], self._walker.determine_wants({})) self.assertEqual(None, self._walker.proto.get_received_line()) @@ -500,7 +500,7 @@ def test_determine_wants(self): self._walker.proto.set_output([b"want " + FOUR + b" multi_ack", None]) self.assertRaises(GitProtocolError, self._walker.determine_wants, heads) - def test_determine_wants_advertisement(self): + def test_determine_wants_advertisement(self) -> None: self._walker.proto.set_output([None]) # advertise branch tips plus tag heads = { @@ -539,16 +539,16 @@ def test_determine_wants_advertisement(self): # TODO: test commit time cutoff - def _handle_shallow_request(self, lines, heads): + def _handle_shallow_request(self, lines, heads) -> None: self._walker.proto.set_output([*lines, None]) self._walker._handle_shallow_request(heads) - def assertReceived(self, expected): + def assertReceived(self, expected) -> None: self.assertEqual( expected, list(iter(self._walker.proto.get_received_line, None)) ) - def test_handle_shallow_request_no_client_shallows(self): + def test_handle_shallow_request_no_client_shallows(self) -> None: 
self._handle_shallow_request([b"deepen 2\n"], [FOUR, FIVE]) self.assertEqual({TWO, THREE}, self._walker.shallow) self.assertReceived( @@ -558,7 +558,7 @@ def test_handle_shallow_request_no_client_shallows(self): ] ) - def test_handle_shallow_request_no_new_shallows(self): + def test_handle_shallow_request_no_new_shallows(self) -> None: lines = [ b"shallow " + TWO + b"\n", b"shallow " + THREE + b"\n", @@ -568,7 +568,7 @@ def test_handle_shallow_request_no_new_shallows(self): self.assertEqual({TWO, THREE}, self._walker.shallow) self.assertReceived([]) - def test_handle_shallow_request_unshallows(self): + def test_handle_shallow_request_unshallows(self) -> None: lines = [ b"shallow " + TWO + b"\n", b"deepen 3\n", @@ -603,10 +603,10 @@ def read_proto_line(self, allowed): assert command in allowed return command, sha - def send_ack(self, sha, ack_type=b""): + def send_ack(self, sha, ack_type=b"") -> None: self.acks.append((sha, ack_type)) - def send_nak(self): + def send_nak(self) -> None: self.acks.append((None, b"nak")) def all_wants_satisfied(self, haves): @@ -626,14 +626,14 @@ def handle_done(self): self.pack_sent = self._impl.handle_done(self.done_required, self.done_received) return self.pack_sent - def notify_done(self): + def notify_done(self) -> None: self.done_received = True class AckGraphWalkerImplTestCase(TestCase): """Base setup and asserts for AckGraphWalker tests.""" - def setUp(self): + def setUp(self) -> None: super().setUp() self._walker = TestProtocolGraphWalker() self._walker.lines = [ @@ -645,24 +645,24 @@ def setUp(self): self._impl = self.impl_cls(self._walker) self._walker._impl = self._impl - def assertNoAck(self): + def assertNoAck(self) -> None: self.assertEqual(None, self._walker.pop_ack()) - def assertAcks(self, acks): + def assertAcks(self, acks) -> None: for sha, ack_type in acks: self.assertEqual((sha, ack_type), self._walker.pop_ack()) self.assertNoAck() - def assertAck(self, sha, ack_type=b""): + def assertAck(self, sha, ack_type=b"") -> None: self.assertAcks([(sha, ack_type)]) - def assertNak(self): + def assertNak(self) -> None: self.assertAck(None, b"nak") - def assertNextEquals(self, sha): + def assertNextEquals(self, sha) -> None: self.assertEqual(sha, next(self._impl)) - def assertNextEmpty(self): + def assertNextEmpty(self) -> None: # This is necessary because of no-done - the assumption that it # it safe to immediately send out the final ACK is no longer # true but the test is still needed for it. 
TestProtocolWalker @@ -675,7 +675,7 @@ def assertNextEmpty(self): class SingleAckGraphWalkerImplTestCase(AckGraphWalkerImplTestCase): impl_cls = SingleAckGraphWalkerImpl - def test_single_ack(self): + def test_single_ack(self) -> None: self.assertNextEquals(TWO) self.assertNoAck() @@ -690,7 +690,7 @@ def test_single_ack(self): self.assertNextEquals(None) self.assertNoAck() - def test_single_ack_flush(self): + def test_single_ack_flush(self) -> None: # same as ack test but ends with a flush-pkt instead of done self._walker.lines[-1] = (None, None) @@ -707,7 +707,7 @@ def test_single_ack_flush(self): self.assertNextEquals(None) self.assertNoAck() - def test_single_ack_nak(self): + def test_single_ack_nak(self) -> None: self.assertNextEquals(TWO) self.assertNoAck() @@ -721,7 +721,7 @@ def test_single_ack_nak(self): self.assertNextEmpty() self.assertNak() - def test_single_ack_nak_flush(self): + def test_single_ack_nak_flush(self) -> None: # same as nak test but ends with a flush-pkt instead of done self._walker.lines[-1] = (None, None) @@ -742,7 +742,7 @@ def test_single_ack_nak_flush(self): class MultiAckGraphWalkerImplTestCase(AckGraphWalkerImplTestCase): impl_cls = MultiAckGraphWalkerImpl - def test_multi_ack(self): + def test_multi_ack(self) -> None: self.assertNextEquals(TWO) self.assertNoAck() @@ -758,7 +758,7 @@ def test_multi_ack(self): self.assertNextEmpty() self.assertAck(THREE) - def test_multi_ack_partial(self): + def test_multi_ack_partial(self) -> None: self.assertNextEquals(TWO) self.assertNoAck() @@ -773,7 +773,7 @@ def test_multi_ack_partial(self): self.assertNextEmpty() self.assertAck(ONE) - def test_multi_ack_flush(self): + def test_multi_ack_flush(self) -> None: self._walker.lines = [ (b"have", TWO), (None, None), @@ -798,7 +798,7 @@ def test_multi_ack_flush(self): self.assertNextEmpty() self.assertAck(THREE) - def test_multi_ack_nak(self): + def test_multi_ack_nak(self) -> None: self.assertNextEquals(TWO) self.assertNoAck() @@ -816,7 +816,7 @@ def test_multi_ack_nak(self): class MultiAckDetailedGraphWalkerImplTestCase(AckGraphWalkerImplTestCase): impl_cls = MultiAckDetailedGraphWalkerImpl - def test_multi_ack(self): + def test_multi_ack(self) -> None: self.assertNextEquals(TWO) self.assertNoAck() @@ -837,7 +837,7 @@ def test_multi_ack(self): # PACK is sent self.assertTrue(self._walker.pack_sent) - def test_multi_ack_nodone(self): + def test_multi_ack_nodone(self) -> None: self._walker.done_required = False self.assertNextEquals(TWO) self.assertNoAck() @@ -859,7 +859,7 @@ def test_multi_ack_nodone(self): # PACK is sent self.assertTrue(self._walker.pack_sent) - def test_multi_ack_flush_end(self): + def test_multi_ack_flush_end(self) -> None: # transmission ends with a flush-pkt without a done but no-done is # assumed. self._walker.lines[-1] = (None, None) @@ -881,7 +881,7 @@ def test_multi_ack_flush_end(self): # PACK is NOT sent self.assertFalse(self._walker.pack_sent) - def test_multi_ack_flush_end_nodone(self): + def test_multi_ack_flush_end_nodone(self) -> None: # transmission ends with a flush-pkt without a done but no-done is # assumed. 
self._walker.lines[-1] = (None, None) @@ -904,7 +904,7 @@ def test_multi_ack_flush_end_nodone(self): # PACK is sent self.assertTrue(self._walker.pack_sent) - def test_multi_ack_partial(self): + def test_multi_ack_partial(self) -> None: self.assertNextEquals(TWO) self.assertNoAck() @@ -919,7 +919,7 @@ def test_multi_ack_partial(self): self.assertNextEmpty() self.assertAck(ONE) - def test_multi_ack_flush(self): + def test_multi_ack_flush(self) -> None: # same as ack test but contains a flush-pkt in the middle self._walker.lines = [ (b"have", TWO), @@ -947,7 +947,7 @@ def test_multi_ack_flush(self): self.assertNextEmpty() self.assertAcks([(THREE, b"ready"), (None, b"nak"), (THREE, b"")]) - def test_multi_ack_nak(self): + def test_multi_ack_nak(self) -> None: self.assertNextEquals(TWO) self.assertNoAck() @@ -964,7 +964,7 @@ def test_multi_ack_nak(self): self.assertNextEmpty() self.assertTrue(self._walker.pack_sent) - def test_multi_ack_nak_nodone(self): + def test_multi_ack_nak_nodone(self) -> None: self._walker.done_required = False self.assertNextEquals(TWO) self.assertNoAck() @@ -983,7 +983,7 @@ def test_multi_ack_nak_nodone(self): self.assertNak() self.assertNextEmpty() - def test_multi_ack_nak_flush(self): + def test_multi_ack_nak_flush(self) -> None: # same as nak test but contains a flush-pkt in the middle self._walker.lines = [ (b"have", TWO), @@ -1005,7 +1005,7 @@ def test_multi_ack_nak_flush(self): self.assertNextEmpty() self.assertNak() - def test_multi_ack_stateless(self): + def test_multi_ack_stateless(self) -> None: # transmission ends with a flush-pkt self._walker.lines[-1] = (None, None) self._walker.stateless_rpc = True @@ -1027,7 +1027,7 @@ def test_multi_ack_stateless(self): self.assertNoAck() self.assertFalse(self._walker.pack_sent) - def test_multi_ack_stateless_nodone(self): + def test_multi_ack_stateless_nodone(self) -> None: self._walker.done_required = False # transmission ends with a flush-pkt self._walker.lines[-1] = (None, None) @@ -1055,7 +1055,7 @@ def test_multi_ack_stateless_nodone(self): class FileSystemBackendTests(TestCase): """Tests for FileSystemBackend.""" - def setUp(self): + def setUp(self) -> None: super().setUp() self.path = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, self.path) @@ -1065,14 +1065,14 @@ def setUp(self): else: self.backend = FileSystemBackend() - def test_nonexistant(self): + def test_nonexistant(self) -> None: self.assertRaises( NotGitRepository, self.backend.open_repository, "/does/not/exist/unless/foo", ) - def test_absolute(self): + def test_absolute(self) -> None: repo = self.backend.open_repository(self.path) self.assertTrue( os.path.samefile( @@ -1080,14 +1080,14 @@ def test_absolute(self): ) ) - def test_child(self): + def test_child(self) -> None: self.assertRaises( NotGitRepository, self.backend.open_repository, os.path.join(self.path, "foo"), ) - def test_bad_repo_path(self): + def test_bad_repo_path(self) -> None: backend = FileSystemBackend() self.assertRaises(NotGitRepository, lambda: backend.open_repository("/ups")) @@ -1096,7 +1096,7 @@ def test_bad_repo_path(self): class DictBackendTests(TestCase): """Tests for DictBackend.""" - def test_nonexistant(self): + def test_nonexistant(self) -> None: repo = MemoryRepo.init_bare([], {}) backend = DictBackend({b"/": repo}) self.assertRaises( @@ -1105,7 +1105,7 @@ def test_nonexistant(self): "/does/not/exist/unless/foo", ) - def test_bad_repo_path(self): + def test_bad_repo_path(self) -> None: repo = MemoryRepo.init_bare([], {}) backend = DictBackend({b"/": repo}) @@ -1115,7 
+1115,7 @@ def test_bad_repo_path(self): class ServeCommandTests(TestCase): """Tests for serve_command.""" - def setUp(self): + def setUp(self) -> None: super().setUp() self.backend = DictBackend({}) @@ -1128,7 +1128,7 @@ def serve_command(self, handler_cls, args, inf, outf): outf=outf, ) - def test_receive_pack(self): + def test_receive_pack(self) -> None: commit = make_commit(id=ONE, parents=[], commit_time=111) self.backend.repos[b"/"] = MemoryRepo.init_bare( [commit], {b"refs/heads/master": commit.id} ) @@ -1150,13 +1150,13 @@ def test_receive_pack(self): class UpdateServerInfoTests(TestCase): """Tests for update_server_info.""" - def setUp(self): + def setUp(self) -> None: super().setUp() self.path = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, self.path) self.repo = Repo.init(self.path) - def test_empty(self): + def test_empty(self) -> None: update_server_info(self.repo) with open(os.path.join(self.path, ".git", "info", "refs"), "rb") as f: self.assertEqual(b"", f.read()) @@ -1164,7 +1164,7 @@ def test_empty(self): with open(p, "rb") as f: self.assertEqual(b"", f.read()) - def test_simple(self): + def test_simple(self) -> None: commit_id = self.repo.do_commit( message=b"foo", committer=b"Joe Example <joe@example.com>", diff --git a/tests/test_stash.py b/tests/test_stash.py index 2bd3b7c4a..04de27bc9 100644 --- a/tests/test_stash.py +++ b/tests/test_stash.py @@ -29,7 +29,7 @@ class StashTests(TestCase): """Tests for stash.""" - def test_obtain(self): + def test_obtain(self) -> None: repo = MemoryRepo() stash = Stash.from_repo(repo) self.assertIsInstance(stash, Stash) diff --git a/tests/test_utils.py b/tests/test_utils.py index c22667fb7..32b45e2d2 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -28,11 +28,11 @@ class BuildCommitGraphTest(TestCase): - def setUp(self): + def setUp(self) -> None: super().setUp() self.store = MemoryObjectStore() - def test_linear(self): + def test_linear(self) -> None: c1, c2 = build_commit_graph(self.store, [[1], [2, 1]]) for obj_id in [c1.id, c2.id, c1.tree, c2.tree]: self.assertIn(obj_id, self.store) @@ -42,7 +42,7 @@ def test_linear(self): self.assertEqual([], self.store[c1.tree].items()) self.assertGreater(c2.commit_time, c1.commit_time) - def test_merge(self): + def test_merge(self) -> None: c1, c2, c3, c4 = build_commit_graph( self.store, [[1], [2, 1], [3, 1], [4, 2, 3]] ) @@ -50,12 +50,12 @@ def test_merge(self): self.assertGreater(c4.commit_time, c2.commit_time) self.assertGreater(c4.commit_time, c3.commit_time) - def test_missing_parent(self): + def test_missing_parent(self) -> None: self.assertRaises( ValueError, build_commit_graph, self.store, [[1], [3, 2], [2, 1]] ) - def test_trees(self): + def test_trees(self) -> None: a1 = make_object(Blob, data=b"aaa1") a2 = make_object(Blob, data=b"aaa2") c1, c2 = build_commit_graph( @@ -66,14 +66,14 @@ def test_trees(self): self.assertEqual((0o100644, a1.id), self.store[c1.tree][b"a"]) self.assertEqual((0o100644, a2.id), self.store[c2.tree][b"a"]) - def test_attrs(self): + def test_attrs(self) -> None: c1, c2 = build_commit_graph( self.store, [[1], [2, 1]], attrs={1: {"message": b"Hooray!"}} ) self.assertEqual(b"Hooray!", c1.message) self.assertEqual(b"Commit 2", c2.message) - def test_commit_time(self): + def test_commit_time(self) -> None: c1, c2, c3 = build_commit_graph( self.store, [[1], [2, 1], [3, 2]], diff --git a/tests/test_walk.py b/tests/test_walk.py index f803b668f..891c29294 100644 --- a/tests/test_walk.py +++ b/tests/test_walk.py @@ -50,7 +50,7 @@ def __eq__(self, other): class
WalkerTest(TestCase): - def setUp(self): + def setUp(self) -> None: super().setUp() self.store = MemoryObjectStore() @@ -70,7 +70,7 @@ def make_linear_commits(self, num_commits, **kwargs): commit_spec.append(c) return self.make_commits(commit_spec, **kwargs) - def assertWalkYields(self, expected, *args, **kwargs): + def assertWalkYields(self, expected, *args, **kwargs) -> None: walker = Walker(self.store, *args, **kwargs) expected = list(expected) for i, entry in enumerate(expected): @@ -79,13 +79,13 @@ def assertWalkYields(self, expected, *args, **kwargs): actual = list(walker) self.assertEqual(expected, actual) - def test_tag(self): + def test_tag(self) -> None: c1, c2, c3 = self.make_linear_commits(3) t2 = make_tag(target=c2) self.store.add_object(t2) self.assertWalkYields([c2, c1], [t2.id]) - def test_linear(self): + def test_linear(self) -> None: c1, c2, c3 = self.make_linear_commits(3) self.assertWalkYields([c1], [c1.id]) self.assertWalkYields([c2, c1], [c2.id]) @@ -95,7 +95,7 @@ def test_linear(self): self.assertWalkYields([c3, c2], [c3.id, c1.id], exclude=[c1.id]) self.assertWalkYields([c3], [c3.id, c1.id], exclude=[c2.id]) - def test_missing(self): + def test_missing(self) -> None: cs = list(reversed(self.make_linear_commits(20))) self.assertWalkYields(cs, [cs[0].id]) @@ -107,7 +107,7 @@ def test_missing(self): self.assertWalkYields(cs[:i], [cs[0].id], max_entries=i) self.assertRaises(MissingCommitError, Walker, self.store, [cs[-1].id]) - def test_branch(self): + def test_branch(self) -> None: c1, x2, x3, y4 = self.make_commits([[1], [2, 1], [3, 2], [4, 1]]) self.assertWalkYields([x3, x2, c1], [x3.id]) self.assertWalkYields([y4, c1], [y4.id]) @@ -117,7 +117,7 @@ def test_branch(self): self.assertWalkYields([y4], [y4.id], exclude=[x3.id]) self.assertWalkYields([x3, x2], [x3.id], exclude=[y4.id]) - def test_merge(self): + def test_merge(self) -> None: c1, c2, c3, c4 = self.make_commits([[1], [2, 1], [3, 1], [4, 2, 3]]) self.assertWalkYields([c4, c3, c2, c1], [c4.id]) self.assertWalkYields([c3, c1], [c3.id]) @@ -125,7 +125,7 @@ def test_merge(self): self.assertWalkYields([c4, c3], [c4.id], exclude=[c2.id]) self.assertWalkYields([c4, c2], [c4.id], exclude=[c3.id]) - def test_merge_of_new_branch_from_old_base(self): + def test_merge_of_new_branch_from_old_base(self) -> None: # The commit on the branch was made at a time after any of the # commits on master, but the branch was from an older commit. # See also test_merge_of_old_branch @@ -139,7 +139,7 @@ def test_merge_of_new_branch_from_old_base(self): self.assertWalkYields([c2, c1], [c2.id]) @expectedFailure - def test_merge_of_old_branch(self): + def test_merge_of_old_branch(self) -> None: # The commit on the branch was made at a time before any of # the commits on master, but it was merged into master after # those commits. 
@@ -153,23 +153,23 @@ def test_merge_of_old_branch(self): self.assertWalkYields([c3, c2, c1], [c3.id]) self.assertWalkYields([c2, c1], [c2.id]) - def test_reverse(self): + def test_reverse(self) -> None: c1, c2, c3 = self.make_linear_commits(3) self.assertWalkYields([c1, c2, c3], [c3.id], reverse=True) - def test_max_entries(self): + def test_max_entries(self) -> None: c1, c2, c3 = self.make_linear_commits(3) self.assertWalkYields([c3, c2, c1], [c3.id], max_entries=3) self.assertWalkYields([c3, c2], [c3.id], max_entries=2) self.assertWalkYields([c3], [c3.id], max_entries=1) - def test_reverse_after_max_entries(self): + def test_reverse_after_max_entries(self) -> None: c1, c2, c3 = self.make_linear_commits(3) self.assertWalkYields([c1, c2, c3], [c3.id], max_entries=3, reverse=True) self.assertWalkYields([c2, c3], [c3.id], max_entries=2, reverse=True) self.assertWalkYields([c3], [c3.id], max_entries=1, reverse=True) - def test_changes_one_parent(self): + def test_changes_one_parent(self) -> None: blob_a1 = make_object(Blob, data=b"a1") blob_a2 = make_object(Blob, data=b"a2") blob_b2 = make_object(Blob, data=b"b2") @@ -190,7 +190,7 @@ def test_changes_one_parent(self): ) self.assertWalkYields([e2, e1], [c2.id]) - def test_changes_multiple_parents(self): + def test_changes_multiple_parents(self) -> None: blob_a1 = make_object(Blob, data=b"a1") blob_b2 = make_object(Blob, data=b"b2") blob_a3 = make_object(Blob, data=b"a3") @@ -213,7 +213,7 @@ def test_changes_multiple_parents(self): [TestWalkEntry(c3, changes)], [c3.id], exclude=[c1.id, c2.id] ) - def test_path_matches(self): + def test_path_matches(self) -> None: walker = Walker(None, [], paths=[b"foo", b"bar", b"baz/quux"]) self.assertTrue(walker._path_matches(b"foo")) self.assertTrue(walker._path_matches(b"foo/a")) @@ -228,7 +228,7 @@ def test_path_matches(self): self.assertFalse(walker._path_matches(b"baz")) self.assertFalse(walker._path_matches(b"baz/quu")) - def test_paths(self): + def test_paths(self) -> None: blob_a1 = make_object(Blob, data=b"a1") blob_b2 = make_object(Blob, data=b"b2") blob_a3 = make_object(Blob, data=b"a3") @@ -255,7 +255,7 @@ def test_paths(self): [TestWalkEntry(c3, changes)], [c3.id], max_entries=1, paths=[b"a"] ) - def test_paths_subtree(self): + def test_paths_subtree(self) -> None: blob_a = make_object(Blob, data=b"a") blob_b = make_object(Blob, data=b"b") c1, c2, c3 = self.make_linear_commits( @@ -269,7 +269,7 @@ def test_paths_subtree(self): self.assertWalkYields([c2], [c3.id], paths=[b"b"]) self.assertWalkYields([c3, c1], [c3.id], paths=[b"x"]) - def test_paths_max_entries(self): + def test_paths_max_entries(self) -> None: blob_a = make_object(Blob, data=b"a") blob_b = make_object(Blob, data=b"b") c1, c2 = self.make_linear_commits( @@ -278,7 +278,7 @@ def test_paths_max_entries(self): self.assertWalkYields([c2], [c2.id], paths=[b"b"], max_entries=1) self.assertWalkYields([c1], [c1.id], paths=[b"a"], max_entries=1) - def test_paths_merge(self): + def test_paths_merge(self) -> None: blob_a1 = make_object(Blob, data=b"a1") blob_a2 = make_object(Blob, data=b"a2") blob_a3 = make_object(Blob, data=b"a3") @@ -294,7 +294,7 @@ def test_paths_merge(self): self.assertWalkYields([m3, y2, x1], [m3.id], paths=[b"a"]) self.assertWalkYields([y2, x1], [m4.id], paths=[b"a"]) - def test_changes_with_renames(self): + def test_changes_with_renames(self) -> None: blob = make_object(Blob, data=b"blob") c1, c2 = self.make_linear_commits( 2, trees={1: [(b"a", blob)], 2: [(b"b", blob)]} @@ -319,7 +319,7 @@ def 
test_changes_with_renames(self): rename_detector=detector, ) - def test_follow_rename(self): + def test_follow_rename(self) -> None: blob = make_object(Blob, data=b"blob") names = [b"a", b"a", b"b", b"b", b"c", b"c"] @@ -341,7 +341,7 @@ def e(n): follow=True, ) - def test_follow_rename_remove_path(self): + def test_follow_rename_remove_path(self) -> None: blob = make_object(Blob, data=b"blob") _, _, _, c4, c5, c6 = self.make_linear_commits( 6, @@ -370,7 +370,7 @@ def e(n): follow=True, ) - def test_since(self): + def test_since(self) -> None: c1, c2, c3 = self.make_linear_commits(3) self.assertWalkYields([c3, c2, c1], [c3.id], since=-1) self.assertWalkYields([c3, c2, c1], [c3.id], since=0) @@ -383,7 +383,7 @@ def test_since(self): self.assertWalkYields([], [c3.id], since=201) self.assertWalkYields([], [c3.id], since=300) - def test_until(self): + def test_until(self) -> None: c1, c2, c3 = self.make_linear_commits(3) self.assertWalkYields([], [c3.id], until=-1) self.assertWalkYields([c1], [c3.id], until=0) @@ -396,14 +396,14 @@ def test_until(self): self.assertWalkYields([c3, c2, c1], [c3.id], until=201) self.assertWalkYields([c3, c2, c1], [c3.id], until=300) - def test_since_until(self): + def test_since_until(self) -> None: c1, c2, c3 = self.make_linear_commits(3) self.assertWalkYields([], [c3.id], since=100, until=99) self.assertWalkYields([c3, c2, c1], [c3.id], since=-1, until=201) self.assertWalkYields([c2], [c3.id], since=100, until=100) self.assertWalkYields([c2], [c3.id], since=50, until=150) - def test_since_over_scan(self): + def test_since_over_scan(self) -> None: commits = self.make_linear_commits(11, times=[9, 0, 1, 2, 3, 4, 5, 8, 6, 7, 9]) c8, _, c10, c11 = commits[-4:] del self.store[commits[0].id] @@ -413,18 +413,18 @@ def test_since_over_scan(self): # even with over-scanning. self.assertWalkYields([c11, c10, c8], [c11.id], since=7) - def assertTopoOrderEqual(self, expected_commits, commits): + def assertTopoOrderEqual(self, expected_commits, commits) -> None: entries = [TestWalkEntry(c, None) for c in commits] actual_ids = [e.commit.id for e in list(_topo_reorder(entries))] self.assertEqual([c.id for c in expected_commits], actual_ids) - def test_topo_reorder_linear(self): + def test_topo_reorder_linear(self) -> None: commits = self.make_linear_commits(5) commits.reverse() for perm in permutations(commits): self.assertTopoOrderEqual(commits, perm) - def test_topo_reorder_multiple_parents(self): + def test_topo_reorder_multiple_parents(self) -> None: c1, c2, c3 = self.make_commits([[1], [2], [3, 1, 2]]) # Already sorted, so totally FIFO. self.assertTopoOrderEqual([c3, c2, c1], [c3, c2, c1]) @@ -438,7 +438,7 @@ def test_topo_reorder_multiple_parents(self): self.assertTopoOrderEqual([c3, c2, c1], [c1, c2, c3]) self.assertTopoOrderEqual([c3, c2, c1], [c2, c1, c3]) - def test_topo_reorder_multiple_children(self): + def test_topo_reorder_multiple_children(self) -> None: c1, c2, c3 = self.make_commits([[1], [2, 1], [3, 1]]) # c2 and c3 are FIFO but c1 moves to the end. 
@@ -450,14 +450,14 @@ def test_topo_reorder_multiple_children(self): self.assertTopoOrderEqual([c2, c3, c1], [c2, c1, c3]) self.assertTopoOrderEqual([c2, c3, c1], [c1, c2, c3]) - def test_out_of_order_children(self): + def test_out_of_order_children(self) -> None: c1, c2, c3, c4, c5 = self.make_commits( [[1], [2, 1], [3, 2], [4, 1], [5, 3, 4]], times=[2, 1, 3, 4, 5] ) self.assertWalkYields([c5, c4, c3, c1, c2], [c5.id]) self.assertWalkYields([c5, c4, c3, c2, c1], [c5.id], order=ORDER_TOPO) - def test_out_of_order_with_exclude(self): + def test_out_of_order_with_exclude(self) -> None: # Create the following graph: # c1-------x2---m6 # \ / @@ -472,13 +472,13 @@ def test_out_of_order_with_exclude(self): # priority queue long before y5. self.assertWalkYields([m6, x2], [m6.id], exclude=[y5.id]) - def test_empty_walk(self): + def test_empty_walk(self) -> None: c1, c2, c3 = self.make_linear_commits(3) self.assertWalkYields([], [c3.id], exclude=[c3.id]) class WalkEntryTest(TestCase): - def setUp(self): + def setUp(self) -> None: super().setUp() self.store = MemoryObjectStore() @@ -498,7 +498,7 @@ def make_linear_commits(self, num_commits, **kwargs): commit_spec.append(c) return self.make_commits(commit_spec, **kwargs) - def test_all_changes(self): + def test_all_changes(self) -> None: # Construct a commit with 2 files in different subdirectories. blob_a = make_object(Blob, data=b"a") blob_b = make_object(Blob, data=b"b") @@ -520,7 +520,7 @@ def test_all_changes(self): changes, ) - def test_all_with_merge(self): + def test_all_with_merge(self) -> None: blob_a = make_object(Blob, data=b"a") blob_a2 = make_object(Blob, data=b"a2") blob_b = make_object(Blob, data=b"b") @@ -560,7 +560,7 @@ def test_all_with_merge(self): changes, ) - def test_filter_changes(self): + def test_filter_changes(self) -> None: # Construct a commit with 2 files in different subdirectories. 
blob_a = make_object(Blob, data=b"a") blob_b = make_object(Blob, data=b"b") @@ -581,7 +581,7 @@ def test_filter_changes(self): changes, ) - def test_filter_with_merge(self): + def test_filter_with_merge(self) -> None: blob_a = make_object(Blob, data=b"a") blob_a2 = make_object(Blob, data=b"a2") blob_b = make_object(Blob, data=b"b") diff --git a/tests/test_web.py b/tests/test_web.py index 02cbe48cc..e2a6c9702 100644 --- a/tests/test_web.py +++ b/tests/test_web.py @@ -24,6 +24,7 @@ import os import re from io import BytesIO +from typing import NoReturn from dulwich.object_store import MemoryObjectStore from dulwich.objects import Blob @@ -71,10 +72,10 @@ def read(self, howmuch): class MinimalistWSGIInputStream2(MinimalistWSGIInputStream): """WSGI input stream with no *working* 'seek()' and 'tell()' methods.""" - def seek(self, pos): + def seek(self, pos) -> NoReturn: raise NotImplementedError - def tell(self): + def tell(self) -> NoReturn: raise NotImplementedError @@ -85,10 +86,10 @@ def __init__(self, *args, **kwargs) -> None: HTTPGitRequest.__init__(self, *args, **kwargs) self.cached = None - def nocache(self): + def nocache(self) -> None: self.cached = False - def cache_forever(self): + def cache_forever(self) -> None: self.cached = True @@ -97,7 +98,7 @@ class WebTestCase(TestCase): _req_class: type[HTTPGitRequest] = TestHTTPGitRequest - def setUp(self): + def setUp(self) -> None: super().setUp() self._environ = {} self._req = self._req_class( @@ -112,10 +113,10 @@ def _start_response(self, status, headers): self._headers = list(headers) return self._output.write - def _handlers(self): + def _handlers(self) -> None: return None - def assertContentTypeEquals(self, expected): + def assertContentTypeEquals(self, expected) -> None: self.assertIn(("Content-Type", expected), self._headers) @@ -131,11 +132,11 @@ def _test_backend(objects, refs=None, named_files=None): class DumbHandlersTestCase(WebTestCase): - def test_send_file_not_found(self): + def test_send_file_not_found(self) -> None: list(send_file(self._req, None, "text/plain")) self.assertEqual(HTTP_NOT_FOUND, self._status) - def test_send_file(self): + def test_send_file(self) -> None: f = BytesIO(b"foobar") output = b"".join(send_file(self._req, f, "some/thing")) self.assertEqual(b"foobar", output) @@ -143,7 +144,7 @@ def test_send_file(self): self.assertContentTypeEquals("some/thing") self.assertTrue(f.closed) - def test_send_file_buffered(self): + def test_send_file_buffered(self) -> None: bufsize = 10240 xs = b"x" * bufsize f = BytesIO(2 * xs) @@ -152,16 +153,16 @@ def test_send_file_buffered(self): self.assertContentTypeEquals("some/thing") self.assertTrue(f.closed) - def test_send_file_error(self): + def test_send_file_error(self) -> None: class TestFile: def __init__(self, exc_class) -> None: self.closed = False self._exc_class = exc_class - def read(self, size=-1): + def read(self, size=-1) -> NoReturn: raise self._exc_class - def close(self): + def close(self) -> None: self.closed = True f = TestFile(IOError) @@ -176,7 +177,7 @@ def close(self): self.assertTrue(f.closed) self.assertFalse(self._req.cached) - def test_get_text_file(self): + def test_get_text_file(self) -> None: backend = _test_backend([], named_files={"description": b"foo"}) mat = re.search(".*", "description") output = b"".join(get_text_file(self._req, backend, mat)) @@ -185,7 +186,7 @@ def test_get_text_file(self): self.assertContentTypeEquals("text/plain") self.assertFalse(self._req.cached) - def test_get_loose_object(self): + def 
test_get_loose_object(self) -> None: blob = make_object(Blob, data=b"foo") backend = _test_backend([blob]) mat = re.search("^(..)(.{38})$", blob.id.decode("ascii")) @@ -195,17 +196,17 @@ def test_get_loose_object(self): self.assertContentTypeEquals("application/x-git-loose-object") self.assertTrue(self._req.cached) - def test_get_loose_object_missing(self): + def test_get_loose_object_missing(self) -> None: mat = re.search("^(..)(.{38})$", "1" * 40) list(get_loose_object(self._req, _test_backend([]), mat)) self.assertEqual(HTTP_NOT_FOUND, self._status) - def test_get_loose_object_error(self): + def test_get_loose_object_error(self) -> None: blob = make_object(Blob, data=b"foo") backend = _test_backend([blob]) mat = re.search("^(..)(.{38})$", blob.id.decode("ascii")) - def as_legacy_object_error(self): + def as_legacy_object_error(self) -> NoReturn: raise OSError self.addCleanup(setattr, Blob, "as_legacy_object", Blob.as_legacy_object) @@ -213,7 +214,7 @@ def as_legacy_object_error(self): list(get_loose_object(self._req, backend, mat)) self.assertEqual(HTTP_ERROR, self._status) - def test_get_pack_file(self): + def test_get_pack_file(self) -> None: pack_name = os.path.join("objects", "pack", "pack-%s.pack" % ("1" * 40)) backend = _test_backend([], named_files={pack_name: b"pack contents"}) mat = re.search(".*", pack_name) @@ -223,7 +224,7 @@ def test_get_pack_file(self): self.assertContentTypeEquals("application/x-git-packed-objects") self.assertTrue(self._req.cached) - def test_get_idx_file(self): + def test_get_idx_file(self) -> None: idx_name = os.path.join("objects", "pack", "pack-%s.idx" % ("1" * 40)) backend = _test_backend([], named_files={idx_name: b"idx contents"}) mat = re.search(".*", idx_name) @@ -233,7 +234,7 @@ def test_get_idx_file(self): self.assertContentTypeEquals("application/x-git-packed-objects-toc") self.assertTrue(self._req.cached) - def test_get_info_refs(self): + def test_get_info_refs(self) -> None: self._environ["QUERY_STRING"] = "" blob1 = make_object(Blob, data=b"1") @@ -265,7 +266,7 @@ def test_get_info_refs(self): self.assertContentTypeEquals("text/plain") self.assertFalse(self._req.cached) - def test_get_info_refs_not_found(self): + def test_get_info_refs_not_found(self) -> None: self._environ["QUERY_STRING"] = "" objects = [] @@ -280,7 +281,7 @@ def test_get_info_refs_not_found(self): self.assertEqual(HTTP_NOT_FOUND, self._status) self.assertContentTypeEquals("text/plain") - def test_get_info_packs(self): + def test_get_info_packs(self) -> None: class TestPackData: def __init__(self, sha) -> None: self.filename = f"pack-{sha}.pack" @@ -326,7 +327,7 @@ def __init__( self.stateless_rpc = stateless_rpc self.advertise_refs = advertise_refs - def handle(self): + def handle(self) -> None: self.proto.write(b"handled input: " + self.proto.recv(1024)) def _make_handler(self, *args, **kwargs): @@ -336,21 +337,21 @@ def _make_handler(self, *args, **kwargs): def _handlers(self): return {b"git-upload-pack": self._make_handler} - def test_handle_service_request_unknown(self): + def test_handle_service_request_unknown(self) -> None: mat = re.search(".*", "/git-evil-handler") content = list(handle_service_request(self._req, "backend", mat)) self.assertEqual(HTTP_FORBIDDEN, self._status) self.assertNotIn(b"git-evil-handler", b"".join(content)) self.assertFalse(self._req.cached) - def _run_handle_service_request(self, content_length=None): + def _run_handle_service_request(self, content_length=None) -> None: self._environ["wsgi.input"] = BytesIO(b"foo") if content_length is 
not None: self._environ["CONTENT_LENGTH"] = content_length mat = re.search(".*", "/git-upload-pack") class Backend: - def open_repository(self, path): + def open_repository(self, path) -> None: return None handler_output = b"".join(handle_service_request(self._req, Backend(), mat)) @@ -363,20 +364,20 @@ def open_repository(self, path): self.assertTrue(self._handler.stateless_rpc) self.assertFalse(self._req.cached) - def test_handle_service_request(self): + def test_handle_service_request(self) -> None: self._run_handle_service_request() - def test_handle_service_request_with_length(self): + def test_handle_service_request_with_length(self) -> None: self._run_handle_service_request(content_length="3") - def test_handle_service_request_empty_length(self): + def test_handle_service_request_empty_length(self) -> None: self._run_handle_service_request(content_length="") - def test_get_info_refs_unknown(self): + def test_get_info_refs_unknown(self) -> None: self._environ["QUERY_STRING"] = "service=git-evil-handler" class Backend: - def open_repository(self, url): + def open_repository(self, url) -> None: return None mat = re.search(".*", "/git-evil-pack") @@ -385,12 +386,12 @@ def open_repository(self, url): self.assertEqual(HTTP_FORBIDDEN, self._status) self.assertFalse(self._req.cached) - def test_get_info_refs(self): + def test_get_info_refs(self) -> None: self._environ["wsgi.input"] = BytesIO(b"foo") self._environ["QUERY_STRING"] = "service=git-upload-pack" class Backend: - def open_repository(self, url): + def open_repository(self, url) -> None: return None mat = re.search(".*", "/git-upload-pack") @@ -413,16 +414,16 @@ def open_repository(self, url): class LengthLimitedFileTestCase(TestCase): - def test_no_cutoff(self): + def test_no_cutoff(self) -> None: f = _LengthLimitedFile(BytesIO(b"foobar"), 1024) self.assertEqual(b"foobar", f.read()) - def test_cutoff(self): + def test_cutoff(self) -> None: f = _LengthLimitedFile(BytesIO(b"foobar"), 3) self.assertEqual(b"foo", f.read()) self.assertEqual(b"", f.read()) - def test_multiple_reads(self): + def test_multiple_reads(self) -> None: f = _LengthLimitedFile(BytesIO(b"foobar"), 3) self.assertEqual(b"fo", f.read(2)) self.assertEqual(b"o", f.read(2)) @@ -433,26 +434,26 @@ class HTTPGitRequestTestCase(WebTestCase): # This class tests the contents of the actual cache headers _req_class = HTTPGitRequest - def test_not_found(self): + def test_not_found(self) -> None: self._req.cache_forever() # cache headers should be discarded message = "Something not found" self.assertEqual(message.encode("ascii"), self._req.not_found(message)) self.assertEqual(HTTP_NOT_FOUND, self._status) self.assertEqual({("Content-Type", "text/plain")}, set(self._headers)) - def test_forbidden(self): + def test_forbidden(self) -> None: self._req.cache_forever() # cache headers should be discarded message = "Something not found" self.assertEqual(message.encode("ascii"), self._req.forbidden(message)) self.assertEqual(HTTP_FORBIDDEN, self._status) self.assertEqual({("Content-Type", "text/plain")}, set(self._headers)) - def test_respond_ok(self): + def test_respond_ok(self) -> None: self._req.respond() self.assertEqual([], self._headers) self.assertEqual(HTTP_OK, self._status) - def test_respond(self): + def test_respond(self) -> None: self._req.nocache() self._req.respond( status=402, @@ -474,7 +475,7 @@ def test_respond(self): class HTTPGitApplicationTestCase(TestCase): - def setUp(self): + def setUp(self) -> None: super().setUp() self._app = HTTPGitApplication("backend") @@ 
-483,25 +484,25 @@ def setUp(self): "REQUEST_METHOD": "GET", } - def _test_handler(self, req, backend, mat): + def _test_handler(self, req, backend, mat) -> str: # tests interface used by all handlers self.assertEqual(self._environ, req.environ) self.assertEqual("backend", backend) self.assertEqual("/foo", mat.group(0)) return "output" - def _add_handler(self, app): + def _add_handler(self, app) -> None: req = self._environ["REQUEST_METHOD"] app.services = { (req, re.compile("/foo$")): self._test_handler, } - def test_call(self): + def test_call(self) -> None: self._add_handler(self._app) self.assertEqual("output", self._app(self._environ, None)) - def test_fallback_app(self): - def test_app(environ, start_response): + def test_fallback_app(self) -> None: + def test_app(environ, start_response) -> str: return "output" app = HTTPGitApplication("backend", fallback_app=test_app) @@ -514,7 +515,7 @@ class GunzipTestCase(HTTPGitApplicationTestCase): """ example_text = __doc__.encode("ascii") - def setUp(self): + def setUp(self) -> None: super().setUp() self._app = GunzipFilter(self._app) self._environ["HTTP_CONTENT_ENCODING"] = "gzip" @@ -529,7 +530,7 @@ def _get_zstream(self, text): zstream.seek(0) return zstream, zlength - def _test_call(self, orig, zstream, zlength): + def _test_call(self, orig, zstream, zlength) -> None: self._add_handler(self._app.app) self.assertLess(zlength, len(orig)) self.assertEqual(self._environ["HTTP_CONTENT_ENCODING"], "gzip") @@ -543,10 +544,10 @@ def _test_call(self, orig, zstream, zlength): self.assertIs(None, self._environ.get("CONTENT_LENGTH")) self.assertNotIn("HTTP_CONTENT_ENCODING", self._environ) - def test_call(self): + def test_call(self) -> None: self._test_call(self.example_text, *self._get_zstream(self.example_text)) - def test_call_no_seek(self): + def test_call_no_seek(self) -> None: """This ensures that the gunzipping code doesn't require any methods on 'wsgi.input' except for '.read()'. (In particular, it shouldn't require '.seek()'. See https://github.com/jelmer/dulwich/issues/140.). @@ -558,7 +559,7 @@ def test_call_no_seek(self): zlength, ) - def test_call_no_working_seek(self): + def test_call_no_working_seek(self) -> None: """Similar to 'test_call_no_seek', but this time the methods are available (but defunct). See https://github.com/jonashaag/klaus/issues/154. """