From 16d4d75a2c9a6c362e4c4b664960036ee89ab0cc Mon Sep 17 00:00:00 2001
From: Roman Donchenko
Date: Thu, 31 Oct 2024 14:01:31 +0200
Subject: [PATCH] Blacken code by default (#8611)

Make the Black workflow just run black with default settings. Instead of
including specific directories, exclude the ones that haven't been
blackened yet via `pyproject.toml`. Leave a few small files out of the
exclusion list; blacken them instead.

This is one step towards project-wide code formatting. It also makes it
easier to format your entire patch when you touch multiple components,
since you don't have to remember which directories to apply `black` to.
---
 .github/workflows/black.yml      |  33 +-----
 cvat/__init__.py                 |   2 +-
 cvat/apps/profiler.py            |   5 +-
 cvat/asgi.py                     |   1 +
 cvat/rq_patching.py              |  20 ++--
 cvat/rqworker.py                 |   3 +
 cvat/urls.py                     |  38 +++----
 cvat/utils/background_jobs.py    |   7 +-
 cvat/utils/http.py               |   7 +-
 cvat/utils/remote_debugger.py    |  11 +-
 cvat/utils/version.py            |  27 +++--
 dev/check_changelog_fragments.py |  18 ++--
 dev/update_version.py            | 169 ++++++++++++++++++-------------
 pyproject.toml                   |  11 ++
 rqscheduler.py                   |   2 +-
 site/build_docs.py               |   4 +-
 site/process_sdk_docs.py         |  16 +--
 utils/__init__.py                |   1 -
 utils/dicom_converter/script.py  |  61 ++++++-----
 19 files changed, 237 insertions(+), 199 deletions(-)

diff --git a/.github/workflows/black.yml b/.github/workflows/black.yml
index f3ec1d48262..a74f70c5437 100644
--- a/.github/workflows/black.yml
+++ b/.github/workflows/black.yml
@@ -5,38 +5,11 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v4
-      - id: files
-        uses: tj-actions/changed-files@v41.0.0
-        with:
-          files: |
-            cvat-sdk/**/*.py
-            cvat-cli/**/*.py
-            tests/python/**/*.py
-            cvat/apps/quality_control/**/*.py
-            cvat/apps/analytics_report/**/*.py
-          dir_names: true
       - name: Run checks
-        env:
-          PR_FILES_AM: ${{ steps.files.outputs.added_modified }}
-          PR_FILES_RENAMED: ${{ steps.files.outputs.renamed }}
         run: |
-          # If different modules use different Black configs,
-          # we need to run Black for each python component group separately.
-          # Otherwise, they all will use the same config.
+          pipx install $(grep "^black" ./cvat-cli/requirements/development.txt)
 
-          UPDATED_DIRS="${{steps.files.outputs.all_changed_files}}"
+          echo "Black version: $(black --version)"
 
-          if [[ ! -z $UPDATED_DIRS ]]; then
-            pipx install $(egrep "black.*" ./cvat-cli/requirements/development.txt)
-
-            echo "Black version: "$(black --version)
-            echo "The dirs will be checked: $UPDATED_DIRS"
-            EXIT_CODE=0
-            for DIR in $UPDATED_DIRS; do
-              black --check --diff $DIR || EXIT_CODE=$(($? | $EXIT_CODE)) || true
-            done
-            exit $EXIT_CODE
-          else
-            echo "No files with the \"py\" extension found"
-          fi
+          black --check --diff .
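For contributors, the practical upshot is that local formatting now mirrors
CI exactly. A minimal sketch of the equivalent local commands (assuming you
run them from the repository root, with black taken from the pin in
cvat-cli/requirements/development.txt, as the workflow above does):

    pipx install "$(grep '^black' ./cvat-cli/requirements/development.txt)"
    black .                 # formats everything not matched by extend-exclude in pyproject.toml
    black --check --diff .  # the same check the workflow runs

The exclusion list added to pyproject.toml later in this patch is what keeps
the not-yet-migrated directories out of both commands.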
diff --git a/cvat/__init__.py b/cvat/__init__.py
index d72cb8e0099..88392366440 100644
--- a/cvat/__init__.py
+++ b/cvat/__init__.py
@@ -4,6 +4,6 @@
 
 from cvat.utils.version import get_version
 
-VERSION = (2, 22, 0, 'alpha', 0)
+VERSION = (2, 22, 0, "alpha", 0)
 
 __version__ = get_version(VERSION)
diff --git a/cvat/apps/profiler.py b/cvat/apps/profiler.py
index 45ddbc95f47..0a3885bb00a 100644
--- a/cvat/apps/profiler.py
+++ b/cvat/apps/profiler.py
@@ -1,13 +1,16 @@
 from django.apps import apps
 
-if apps.is_installed('silk'):
+if apps.is_installed("silk"):
     from silk.profiling.profiler import silk_profile  # pylint: disable=unused-import
 else:
     from functools import wraps
+
     def silk_profile(name=None):
         def profile(f):
             @wraps(f)
             def wrapped(*args, **kwargs):
                 return f(*args, **kwargs)
+
             return wrapped
+
         return profile
diff --git a/cvat/asgi.py b/cvat/asgi.py
index 44ddd0d8713..2fbe40a8d4c 100644
--- a/cvat/asgi.py
+++ b/cvat/asgi.py
@@ -24,6 +24,7 @@
 
 if debug.is_debugging_enabled():
+
     class DebuggerApp(ASGIHandler):
         """
         Support for VS code debugger
diff --git a/cvat/rq_patching.py b/cvat/rq_patching.py
index cd8c1ac7422..a12bcaaaedd 100644
--- a/cvat/rq_patching.py
+++ b/cvat/rq_patching.py
@@ -32,18 +32,25 @@ def custom_started_job_registry_cleanup(self, timestamp: Optional[float] = None)
     job_ids = self.get_expired_job_ids(score)
 
     if job_ids:
-        failed_job_registry = rq.registry.FailedJobRegistry(self.name, self.connection, serializer=self.serializer)
+        failed_job_registry = rq.registry.FailedJobRegistry(
+            self.name, self.connection, serializer=self.serializer
+        )
         queue = self.get_queue()
 
         with self.connection.pipeline() as pipeline:
             for job_id in job_ids:
                 try:
-                    job = self.job_class.fetch(job_id, connection=self.connection, serializer=self.serializer)
+                    job = self.job_class.fetch(
+                        job_id, connection=self.connection, serializer=self.serializer
+                    )
                 except NoSuchJobError:
                     continue
 
                 job.execute_failure_callback(
-                    self.death_penalty_class, AbandonedJobError, AbandonedJobError(), traceback.extract_stack()
+                    self.death_penalty_class,
+                    AbandonedJobError,
+                    AbandonedJobError(),
+                    traceback.extract_stack(),
                 )
 
                 retry = job.retries_left and job.retries_left > 0
@@ -54,8 +61,8 @@ def custom_started_job_registry_cleanup(self, timestamp: Optional[float] = None)
                 else:
                     exc_string = f"due to {AbandonedJobError.__name__}"
                     rq.registry.logger.warning(
-                        f'{self.__class__.__name__} cleanup: Moving job to {rq.registry.FailedJobRegistry.__name__} '
-                        f'({exc_string})'
+                        f"{self.__class__.__name__} cleanup: Moving job to {rq.registry.FailedJobRegistry.__name__} "
+                        f"({exc_string})"
                     )
                     job.set_status(JobStatus.FAILED)
                     job._exc_info = f"Moved to {rq.registry.FailedJobRegistry.__name__}, {exc_string}, at {datetime.now()}"
@@ -69,7 +76,8 @@ def custom_started_job_registry_cleanup(self, timestamp: Optional[float] = None)
 
     return job_ids
 
+
 def update_started_job_registry_cleanup() -> None:
     # don't forget to check if the issue https://github.com/rq/rq/issues/2006 has been resolved in upstream
-    assert VERSION == '1.16.0'
+    assert VERSION == "1.16.0"
     rq.registry.StartedJobRegistry.cleanup = custom_started_job_registry_cleanup
diff --git a/cvat/rqworker.py b/cvat/rqworker.py
index d368a1ef262..8a3e187b74b 100644
--- a/cvat/rqworker.py
+++ b/cvat/rqworker.py
@@ -42,12 +42,14 @@ def execute_job(self, *args, **kwargs):
         # errors during debugging
         # https://stackoverflow.com/questions/8242837/django-multiprocessing-and-database-connections/10684672#10684672
         from django import db
+
         db.connections.close_all()
 
         return self.perform_job(*args, **kwargs)
 
 
 if debug.is_debugging_enabled():
+
     class RemoteDebugWorker(SimpleWorker):
         """
         Support for VS code debugger
@@ -68,6 +70,7 @@ def execute_job(self, *args, **kwargs):
 
 if os.environ.get("COVERAGE_PROCESS_START"):
     import coverage
+
     default_exit = os._exit
 
     def coverage_exit(*args, **kwargs):
diff --git a/cvat/urls.py b/cvat/urls.py
index 144ed619f76..08257a14b81 100644
--- a/cvat/urls.py
+++ b/cvat/urls.py
@@ -23,31 +23,31 @@
 from django.urls import path, include
 
 urlpatterns = [
-    path('admin/', admin.site.urls),
-    path('', include('cvat.apps.engine.urls')),
-    path('django-rq/', include('django_rq.urls')),
+    path("admin/", admin.site.urls),
+    path("", include("cvat.apps.engine.urls")),
+    path("django-rq/", include("django_rq.urls")),
 ]
 
-if apps.is_installed('cvat.apps.log_viewer'):
-    urlpatterns.append(path('', include('cvat.apps.log_viewer.urls')))
+if apps.is_installed("cvat.apps.log_viewer"):
+    urlpatterns.append(path("", include("cvat.apps.log_viewer.urls")))
 
-if apps.is_installed('cvat.apps.events'):
-    urlpatterns.append(path('api/', include('cvat.apps.events.urls')))
+if apps.is_installed("cvat.apps.events"):
+    urlpatterns.append(path("api/", include("cvat.apps.events.urls")))
 
-if apps.is_installed('cvat.apps.lambda_manager'):
-    urlpatterns.append(path('', include('cvat.apps.lambda_manager.urls')))
+if apps.is_installed("cvat.apps.lambda_manager"):
+    urlpatterns.append(path("", include("cvat.apps.lambda_manager.urls")))
 
-if apps.is_installed('cvat.apps.webhooks'):
-    urlpatterns.append(path('api/', include('cvat.apps.webhooks.urls')))
+if apps.is_installed("cvat.apps.webhooks"):
+    urlpatterns.append(path("api/", include("cvat.apps.webhooks.urls")))
 
-if apps.is_installed('cvat.apps.quality_control'):
-    urlpatterns.append(path('api/', include('cvat.apps.quality_control.urls')))
+if apps.is_installed("cvat.apps.quality_control"):
+    urlpatterns.append(path("api/", include("cvat.apps.quality_control.urls")))
 
-if apps.is_installed('silk'):
-    urlpatterns.append(path('profiler/', include('silk.urls')))
+if apps.is_installed("silk"):
+    urlpatterns.append(path("profiler/", include("silk.urls")))
 
-if apps.is_installed('health_check'):
-    urlpatterns.append(path('api/server/health/', include('health_check.urls')))
+if apps.is_installed("health_check"):
+    urlpatterns.append(path("api/server/health/", include("health_check.urls")))
 
-if apps.is_installed('cvat.apps.analytics_report'):
-    urlpatterns.append(path('api/', include('cvat.apps.analytics_report.urls')))
+if apps.is_installed("cvat.apps.analytics_report"):
+    urlpatterns.append(path("api/", include("cvat.apps.analytics_report.urls")))
diff --git a/cvat/utils/background_jobs.py b/cvat/utils/background_jobs.py
index caf2e859a53..72c93eaeaf8 100644
--- a/cvat/utils/background_jobs.py
+++ b/cvat/utils/background_jobs.py
@@ -7,12 +7,9 @@
 
 import django_rq
 
+
 def schedule_job_with_throttling(
-    queue_name: str,
-    job_id_base: str,
-    scheduled_time: datetime,
-    func: Callable,
-    **func_kwargs
+    queue_name: str, job_id_base: str, scheduled_time: datetime, func: Callable, **func_kwargs
 ) -> None:
     """
     This function schedules an RQ job to run at `scheduled_time`,
diff --git a/cvat/utils/http.py b/cvat/utils/http.py
index b2ed89a5d55..2cb1b7498b3 100644
--- a/cvat/utils/http.py
+++ b/cvat/utils/http.py
@@ -19,11 +19,12 @@
 
 if settings.SMOKESCREEN_ENABLED:
     PROXIES_FOR_UNTRUSTED_URLS = {
-        'http': 'http://localhost:4750',
-        'https': 'http://localhost:4750',
+        "http": "http://localhost:4750",
+        "https": "http://localhost:4750",
     }
 
+
 def make_requests_session() -> requests.Session:
     session = requests.Session()
-    session.headers['User-Agent'] = _CVAT_USER_AGENT
+    session.headers["User-Agent"] = _CVAT_USER_AGENT
     return session
diff --git a/cvat/utils/remote_debugger.py b/cvat/utils/remote_debugger.py
index b4d01baf3c3..bc6ef40ae0e 100644
--- a/cvat/utils/remote_debugger.py
+++ b/cvat/utils/remote_debugger.py
@@ -6,7 +6,8 @@
 
 def is_debugging_enabled() -> bool:
-    return os.environ.get('CVAT_DEBUG_ENABLED') == 'yes'
+    return os.environ.get("CVAT_DEBUG_ENABLED") == "yes"
+
 
 if is_debugging_enabled():
     import debugpy
@@ -21,8 +22,8 @@ class RemoteDebugger:
         Read more: https://modwsgi.readthedocs.io/en/develop/user-guides/debugging-techniques.html
         """
 
-        ENV_VAR_PORT = 'CVAT_DEBUG_PORT'
-        ENV_VAR_WAIT = 'CVAT_DEBUG_WAIT'
+        ENV_VAR_PORT = "CVAT_DEBUG_PORT"
+        ENV_VAR_WAIT = "CVAT_DEBUG_WAIT"
 
         __debugger_initialized = False
 
         @classmethod
@@ -35,7 +36,7 @@ def _singleton_init(cls):
 
                 # The only intended use is in Docker.
                 # Using 127.0.0.1 will not allow host connections
-                addr = ('0.0.0.0', port)  # nosec - B104:hardcoded_bind_all_interfaces
+                addr = ("0.0.0.0", port)  # nosec - B104:hardcoded_bind_all_interfaces
 
                 # Debugpy is a singleton
                 # We put it in the main thread of the process and then report new threads
@@ -45,7 +46,7 @@ def _singleton_init(cls):
                 # Feel free to enable if needed.
                 debugpy.configure({"subProcess": False})
 
-                if os.environ.get(cls.ENV_VAR_WAIT) == 'yes':
+                if os.environ.get(cls.ENV_VAR_WAIT) == "yes":
                     debugpy.wait_for_client()
             except Exception as ex:
                 raise Exception("failed to set debugger") from ex
diff --git a/cvat/utils/version.py b/cvat/utils/version.py
index ecc79eea705..8b1b53a1038 100644
--- a/cvat/utils/version.py
+++ b/cvat/utils/version.py
@@ -11,6 +11,7 @@
 import os
 import subprocess
 
+
 def get_version(version):
     """Return a PEP 440-compliant version number from VERSION."""
     # Now build the two parts of the version number:
@@ -20,21 +21,23 @@ def get_version(version):
 
     main = get_main_version(version)
 
-    sub = ''
-    if version[3] == 'alpha' and version[4] == 0:
+    sub = ""
+    if version[3] == "alpha" and version[4] == 0:
         git_changeset = get_git_changeset()
         if git_changeset:
-            sub = '.dev%s' % git_changeset
+            sub = ".dev%s" % git_changeset
 
-    elif version[3] != 'final':
-        mapping = {'alpha': 'a', 'beta': 'b', 'rc': 'rc'}
+    elif version[3] != "final":
+        mapping = {"alpha": "a", "beta": "b", "rc": "rc"}
         sub = mapping[version[3]] + str(version[4])
 
     return main + sub
 
+
 def get_main_version(version):
     """Return main version (X.Y.Z) from VERSION."""
-    return '.'.join(str(x) for x in version[:3])
+    return ".".join(str(x) for x in version[:3])
+
 
 def get_git_changeset():
     """Return a numeric identifier of the latest git changeset.
@@ -44,14 +47,16 @@ def get_git_changeset():
     so it's sufficient for generating the development version numbers.
""" repo_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) - git_log = subprocess.Popen( # nosec: B603, B607 - ['git', 'log', '--pretty=format:%ct', '--quiet', '-1', 'HEAD'], - stdout=subprocess.PIPE, stderr=subprocess.PIPE, - cwd=repo_dir, universal_newlines=True, + git_log = subprocess.Popen( # nosec: B603, B607 + ["git", "log", "--pretty=format:%ct", "--quiet", "-1", "HEAD"], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + cwd=repo_dir, + universal_newlines=True, ) timestamp = git_log.communicate()[0] try: timestamp = datetime.datetime.fromtimestamp(int(timestamp), tz=datetime.timezone.utc) except ValueError: return None - return timestamp.strftime('%Y%m%d%H%M%S') + return timestamp.strftime("%Y%m%d%H%M%S") diff --git a/dev/check_changelog_fragments.py b/dev/check_changelog_fragments.py index d417bcd669f..e837842efaf 100755 --- a/dev/check_changelog_fragments.py +++ b/dev/check_changelog_fragments.py @@ -6,17 +6,18 @@ REPO_ROOT = Path(__file__).resolve().parents[1] + def main(): scriv_config = configparser.ConfigParser() - scriv_config.read(REPO_ROOT / 'changelog.d/scriv.ini') + scriv_config.read(REPO_ROOT / "changelog.d/scriv.ini") - scriv_section = scriv_config['scriv'] - assert scriv_section['format'] == 'md' + scriv_section = scriv_config["scriv"] + assert scriv_section["format"] == "md" - md_header_level = int(scriv_section['md_header_level']) - md_header_prefix = '#' * md_header_level + '# ' + md_header_level = int(scriv_section["md_header_level"]) + md_header_prefix = "#" * md_header_level + "# " - categories = {s.strip() for s in scriv_section['categories'].split(',')} + categories = {s.strip() for s in scriv_section["categories"].split(",")} success = True @@ -25,12 +26,12 @@ def complain(message): success = False print(f"{fragment_path.relative_to(REPO_ROOT)}:{line_index+1}: {message}", file=sys.stderr) - for fragment_path in REPO_ROOT.glob('changelog.d/*.md'): + for fragment_path in REPO_ROOT.glob("changelog.d/*.md"): with open(fragment_path) as fragment_file: for line_index, line in enumerate(fragment_file): if not line.startswith(md_header_prefix): # The first line should be a header, and all headers should be of appropriate level. 
-                    if line_index == 0 or line.startswith('#'):
+                    if line_index == 0 or line.startswith("#"):
                         complain(f"line should start with {md_header_prefix!r}")
                     continue
@@ -40,4 +41,5 @@ def complain(message):
 
     sys.exit(0 if success else 1)
 
+
 main()
diff --git a/dev/update_version.py b/dev/update_version.py
index 6cdaf313f96..ed8d08a40f4 100755
--- a/dev/update_version.py
+++ b/dev/update_version.py
@@ -9,40 +9,43 @@
 
 from typing import Callable, Match, Pattern
 
-SUCCESS_CHAR = '\u2714'
-FAIL_CHAR = '\u2716'
+SUCCESS_CHAR = "\u2714"
+FAIL_CHAR = "\u2716"
 
-CVAT_VERSION_PATTERN = re.compile(r'VERSION\s*=\s*\((\d+),\s*(\d*),\s*(\d+),\s*[\',\"](\w+)[\',\"],\s*(\d+)\)')
+CVAT_VERSION_PATTERN = re.compile(
+    r"VERSION\s*=\s*\((\d+),\s*(\d*),\s*(\d+),\s*[\',\"](\w+)[\',\"],\s*(\d+)\)"
+)
 
 REPO_ROOT_DIR = Path(__file__).resolve().parents[1]
-CVAT_INIT_PY_REL_PATH = 'cvat/__init__.py'
+CVAT_INIT_PY_REL_PATH = "cvat/__init__.py"
 CVAT_INIT_PY_PATH = REPO_ROOT_DIR / CVAT_INIT_PY_REL_PATH
 
+
 @dataclass()
 class Version:
     major: int = 0
     minor: int = 0
     patch: int = 0
-    prerelease: str = ''
+    prerelease: str = ""
     prerelease_number: int = 0
 
     def __str__(self) -> str:
-        return f'{self.major}.{self.minor}.{self.patch}-{self.prerelease}.{self.prerelease_number}'
+        return f"{self.major}.{self.minor}.{self.patch}-{self.prerelease}.{self.prerelease_number}"
 
     def cvat_repr(self):
-        return f"({self.major}, {self.minor}, {self.patch}, '{self.prerelease}', {self.prerelease_number})"
+        return f'({self.major}, {self.minor}, {self.patch}, "{self.prerelease}", {self.prerelease_number})'
 
     def compose_repr(self):
-        if self.prerelease != 'final':
-            return 'dev'
-        return f'v{self.major}.{self.minor}.{self.patch}'
+        if self.prerelease != "final":
+            return "dev"
+        return f"v{self.major}.{self.minor}.{self.patch}"
 
     def increment_prerelease_number(self) -> None:
         self.prerelease_number += 1
 
     def increment_prerelease(self) -> None:
-        flow = ('alpha', 'beta', 'rc', 'final')
+        flow = ("alpha", "beta", "rc", "final")
         idx = flow.index(self.prerelease)
         if idx == len(flow) - 1:
             raise ValueError(f"Cannot increment current '{self.prerelease}' prerelease version")
@@ -51,9 +54,9 @@ def increment_prerelease(self) -> None:
         self._set_default_prerelease_number()
 
     def set_prerelease(self, value: str) -> None:
-        values = ('alpha', 'beta', 'rc', 'final')
+        values = ("alpha", "beta", "rc", "final")
         if value not in values:
-            raise ValueError(f'{value} is a wrong, must be one of {values}')
+            raise ValueError(f"{value} is a wrong, must be one of {values}")
 
         self.prerelease = value
         self._set_default_prerelease_number()
@@ -71,15 +74,15 @@ def increment_major(self) -> None:
         self._set_default_minor()
 
     def set(self, v: str) -> None:
-        self.major, self.minor, self.patch = map(int, v.split('.'))
-        self.prerelease = 'final'
+        self.major, self.minor, self.patch = map(int, v.split("."))
+        self.prerelease = "final"
         self.prerelease_number = 0
 
     def _set_default_prerelease_number(self) -> None:
         self.prerelease_number = 0
 
    def _set_default_prerelease(self) -> None:
-        self.prerelease = 'alpha'
+        self.prerelease = "alpha"
         self._set_default_prerelease_number()
 
     def _set_default_patch(self) -> None:
@@ -90,6 +93,7 @@ def _set_default_minor(self) -> None:
         self.minor = 0
         self._set_default_patch()
 
+
 @dataclass(frozen=True)
 class ReplacementRule:
     rel_path: str
@@ -101,89 +105,113 @@ def apply(self, new_version: Version, *, verify_only: bool) -> bool:
         text = path.read_text()
 
         new_text, num_replacements = self.pattern.subn(
-            functools.partial(self.replacement, new_version), text)
+            functools.partial(self.replacement, new_version), text
+        )
 
         if not num_replacements:
-            print(f'{FAIL_CHAR} {self.rel_path}: failed to match version pattern.')
+            print(f"{FAIL_CHAR} {self.rel_path}: failed to match version pattern.")
             return False
 
         if text == new_text:
             if verify_only:
-                print(f'{SUCCESS_CHAR} {self.rel_path}: verified.')
+                print(f"{SUCCESS_CHAR} {self.rel_path}: verified.")
             else:
-                print(f'{SUCCESS_CHAR} {self.rel_path}: no need to update.')
+                print(f"{SUCCESS_CHAR} {self.rel_path}: no need to update.")
         else:
             if verify_only:
-                print(f'{FAIL_CHAR} {self.rel_path}: verification failed.')
+                print(f"{FAIL_CHAR} {self.rel_path}: verification failed.")
                 return False
             else:
                 path.write_text(new_text)
-                print(f'{SUCCESS_CHAR} {self.rel_path}: updated.')
+                print(f"{SUCCESS_CHAR} {self.rel_path}: updated.")
 
         return True
 
-REPLACEMENT_RULES = [
-    ReplacementRule(CVAT_INIT_PY_REL_PATH, CVAT_VERSION_PATTERN,
-        lambda v, m: f'VERSION = {v.cvat_repr()}'),
-
-    ReplacementRule('docker-compose.yml',
-        re.compile(r'(\$\{CVAT_VERSION:-)([\w.]+)(\})'),
-        lambda v, m: m[1] + v.compose_repr() + m[3]),
-
-    ReplacementRule('helm-chart/values.yaml',
-        re.compile(r'(^  image: cvat/(?:ui|server)\n    tag: )([\w.]+)', re.M),
-        lambda v, m: m[1] + v.compose_repr()),
-    ReplacementRule('cvat-sdk/gen/generate.sh',
+REPLACEMENT_RULES = [
+    ReplacementRule(
+        CVAT_INIT_PY_REL_PATH, CVAT_VERSION_PATTERN, lambda v, m: f"VERSION = {v.cvat_repr()}"
+    ),
+    ReplacementRule(
+        "docker-compose.yml",
+        re.compile(r"(\$\{CVAT_VERSION:-)([\w.]+)(\})"),
+        lambda v, m: m[1] + v.compose_repr() + m[3],
+    ),
+    ReplacementRule(
+        "helm-chart/values.yaml",
+        re.compile(r"(^  image: cvat/(?:ui|server)\n    tag: )([\w.]+)", re.M),
+        lambda v, m: m[1] + v.compose_repr(),
+    ),
+    ReplacementRule(
+        "cvat-sdk/gen/generate.sh",
         re.compile(r'^VERSION="[\d.]+"$', re.M),
-        lambda v, m: f'VERSION="{v.major}.{v.minor}.{v.patch}"'),
-
-    ReplacementRule('cvat/schema.yml',
+        lambda v, m: f'VERSION="{v.major}.{v.minor}.{v.patch}"',
+    ),
+    ReplacementRule(
+        "cvat/schema.yml",
         re.compile(r"^  version: [\d.]+$", re.M),
-        lambda v, m: f'  version: {v.major}.{v.minor}.{v.patch}'),
-
-    ReplacementRule('cvat-cli/src/cvat_cli/version.py',
+        lambda v, m: f"  version: {v.major}.{v.minor}.{v.patch}",
+    ),
+    ReplacementRule(
+        "cvat-cli/src/cvat_cli/version.py",
         re.compile(r'^VERSION = "[\d.]+"$', re.M),
-        lambda v, m: f'VERSION = "{v.major}.{v.minor}.{v.patch}"'),
-
-    ReplacementRule('cvat-cli/requirements/base.txt',
-        re.compile(r'^cvat-sdk~=[\d.]+$', re.M),
-        lambda v, m: f'cvat-sdk~={v.major}.{v.minor}.{v.patch}'),
+        lambda v, m: f'VERSION = "{v.major}.{v.minor}.{v.patch}"',
+    ),
+    ReplacementRule(
+        "cvat-cli/requirements/base.txt",
+        re.compile(r"^cvat-sdk~=[\d.]+$", re.M),
+        lambda v, m: f"cvat-sdk~={v.major}.{v.minor}.{v.patch}",
+    ),
 ]
 
+
 def get_current_version() -> Version:
     version_text = CVAT_INIT_PY_PATH.read_text()
 
     match = re.search(CVAT_VERSION_PATTERN, version_text)
     if not match:
-        raise RuntimeError(f'Failed to find version in {CVAT_INIT_PY_PATH}')
+        raise RuntimeError(f"Failed to find version in {CVAT_INIT_PY_PATH}")
 
     return Version(int(match[1]), int(match[2]), int(match[3]), match[4], int(match[5]))
 
+
 def main() -> None:
-    parser = argparse.ArgumentParser(description='Bump CVAT version')
+    parser = argparse.ArgumentParser(description="Bump CVAT version")
 
     action_group = parser.add_mutually_exclusive_group(required=True)
 
-    action_group.add_argument('--major', action='store_true',
-        help='Increment the existing major version by 1')
-    action_group.add_argument('--minor', action='store_true',
-        help='Increment the existing minor version by 1')
-    action_group.add_argument('--patch', action='store_true',
-        help='Increment the existing patch version by 1')
-    action_group.add_argument('--prerelease', nargs='?', const='increment',
-        help='''Increment prerelease version alpha->beta->rc->final,
-            Also it's possible to pass value explicitly''')
-    action_group.add_argument('--prerelease_number', action='store_true',
-        help='Increment prerelease number by 1')
-
-    action_group.add_argument('--current', '--show-current',
-        action='store_true', help='Display current version')
-    action_group.add_argument('--verify-current',
-        action='store_true', help='Check that all version numbers are consistent')
-
-    action_group.add_argument('--set', metavar='X.Y.Z',
-        help='Set the version to the specified version')
+    action_group.add_argument(
+        "--major", action="store_true", help="Increment the existing major version by 1"
+    )
+    action_group.add_argument(
+        "--minor", action="store_true", help="Increment the existing minor version by 1"
+    )
+    action_group.add_argument(
+        "--patch", action="store_true", help="Increment the existing patch version by 1"
+    )
+    action_group.add_argument(
+        "--prerelease",
+        nargs="?",
+        const="increment",
+        help="""Increment prerelease version alpha->beta->rc->final,
+            Also it's possible to pass value explicitly""",
+    )
+    action_group.add_argument(
+        "--prerelease_number", action="store_true", help="Increment prerelease number by 1"
+    )
+
+    action_group.add_argument(
+        "--current", "--show-current", action="store_true", help="Display current version"
+    )
+    action_group.add_argument(
+        "--verify-current",
+        action="store_true",
+        help="Check that all version numbers are consistent",
+    )
+
+    action_group.add_argument(
+        "--set", metavar="X.Y.Z", help="Set the version to the specified version"
+    )
 
     args = parser.parse_args()
 
@@ -201,7 +229,7 @@ def main() -> None:
         version.increment_prerelease_number()
 
     elif args.prerelease:
-        if args.prerelease == 'increment':
+        if args.prerelease == "increment":
             version.increment_prerelease()
         else:
             version.set_prerelease(args.prerelease)
@@ -222,9 +250,9 @@ def main() -> None:
         assert False, "Unreachable code"
 
     if verify_only:
-        print(f'Verifying that version is {version}...')
+        print(f"Verifying that version is {version}...")
     else:
-        print(f'Bumping version to {version}...')
+        print(f"Bumping version to {version}...")
 
     print()
 
     success = True
@@ -239,5 +267,6 @@ def main() -> None:
     else:
         sys.exit("\nFailed to update one or more files!")
 
-if __name__ == '__main__':
+
+if __name__ == "__main__":
     main()
diff --git a/pyproject.toml b/pyproject.toml
index 6d077245157..528bdc579fc 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -7,3 +7,14 @@ skip_gitignore = true # align tool behavior with Black
 [tool.black]
 line-length = 100
 target-version = ['py39']
+extend-exclude = """
+# TODO: get rid of these
+^/cvat/apps/(
+    dataset_manager|dataset_repo|engine|events
+    |health|iam|lambda_manager|log_viewer
+    |organizations|webhooks
+)/
+| ^/cvat/settings/
+| ^/serverless/
+| ^/utils/dataset_manifest/
+"""
diff --git a/rqscheduler.py b/rqscheduler.py
index 82b7499baf8..5ae76e64a7f 100644
--- a/rqscheduler.py
+++ b/rqscheduler.py
@@ -9,5 +9,5 @@
 
 from rq_scheduler.scripts import rqscheduler
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     rqscheduler.main()
diff --git a/site/build_docs.py b/site/build_docs.py
index 25af0b0e8f8..2eca3a94133 100755
--- a/site/build_docs.py
+++ b/site/build_docs.py
@@ -157,9 +157,7 @@ def validate_env():
     try:
         subprocess.run([hugo, "version"], capture_output=True)  # nosec
     except (subprocess.CalledProcessError, FileNotFoundError) as ex:
-        raise Exception(
-            f"Failed to run '{hugo}', please make sure it exists."
-        ) from ex
+        raise Exception(f"Failed to run '{hugo}', please make sure it exists.") from ex
 
 
 if __name__ == "__main__":
diff --git a/site/process_sdk_docs.py b/site/process_sdk_docs.py
index 3b194124841..03324aea691 100755
--- a/site/process_sdk_docs.py
+++ b/site/process_sdk_docs.py
@@ -25,9 +25,7 @@ def __init__(self, *, input_dir: str, site_root: str) -> None:
         self._site_root = site_root
 
         self._content_dir = osp.join(self._site_root, "content")
-        self._sdk_reference_dir = osp.join(
-            self._content_dir, "en/docs/api_sdk/sdk/reference"
-        )
+        self._sdk_reference_dir = osp.join(self._content_dir, "en/docs/api_sdk/sdk/reference")
 
         self._templates_dir = osp.join(self._site_root, "templates")
 
     @staticmethod
@@ -97,9 +95,7 @@ def _move_api_summary(self):
         apis_index_filename = osp.join(
             osp.relpath(self._sdk_reference_dir, self._content_dir), "apis/_index.md"
         )
-        apis_index_path = osp.join(
-            self._templates_dir, apis_index_filename + ".template"
-        )
+        apis_index_path = osp.join(self._templates_dir, apis_index_filename + ".template")
 
         with open(apis_index_path) as f:
             contents = f.read()
@@ -126,9 +122,7 @@ def _fix_page_links_and_references(self):
             os.rename(src_path, dst_path)
             mapping[src_filename] = dst_filename
 
-        self._reference_files = [
-            osp.join(self._sdk_reference_dir, p) for p in mapping.values()
-        ]
+        self._reference_files = [osp.join(self._sdk_reference_dir, p) for p in mapping.values()]
 
         for p in iglob(self._sdk_reference_dir + "/**/*.md", recursive=True):
             with open(p) as f:
@@ -146,9 +140,7 @@ def _fix_page_links_and_references(self):
             with open(p, "w") as f:
                 f.write(contents)
 
-    def _process_non_code_blocks(
-        self, text: str, handlers: List[Callable[[str], str]]
-    ) -> str:
+    def _process_non_code_blocks(self, text: str, handlers: List[Callable[[str], str]]) -> str:
         """
         Allows to process Markdown documents with passed callbacks.
         Callbacks are only executed outside code blocks.
diff --git a/utils/__init__.py b/utils/__init__.py
index d0af4b96794..6370694b5ea 100644
--- a/utils/__init__.py
+++ b/utils/__init__.py
@@ -1,4 +1,3 @@
 # Copyright (C) 2022 Intel Corporation
 #
 # SPDX-License-Identifier: MIT
-
diff --git a/utils/dicom_converter/script.py b/utils/dicom_converter/script.py
index 23a1e7526e3..3fe7ef0be6d 100644
--- a/utils/dicom_converter/script.py
+++ b/utils/dicom_converter/script.py
@@ -16,10 +16,20 @@
 
 # Script configuration
-logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(message)s')
-parser = argparse.ArgumentParser(description='The script is used to convert some kinds of DICOM (.dcm) files to regular image files (.png)')
-parser.add_argument('input', type=str, help='A root directory with medical data files in DICOM format. The script finds all these files based on their extension')
-parser.add_argument('output', type=str, help='Where to save converted files. The script repeats internal directories structure of the input root directory')
+logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(message)s")
+parser = argparse.ArgumentParser(
+    description="The script is used to convert some kinds of DICOM (.dcm) files to regular image files (.png)"
+)
+parser.add_argument(
+    "input",
+    type=str,
+    help="A root directory with medical data files in DICOM format. The script finds all these files based on their extension",
+)
+parser.add_argument(
+    "output",
+    type=str,
+    help="Where to save converted files. The script repeats internal directories structure of the input root directory",
+)
 
 args = parser.parse_args()
 
@@ -32,11 +42,11 @@ def __init__(self, filename):
         self._max_value = ds.pixel_array.max()
         self._depth = ds.BitsStored
 
-        logging.debug('File: {}'.format(filename))
-        logging.debug('Photometric interpretation: {}'.format(self._photometric_interpretation))
-        logging.debug('Min value: {}'.format(self._min_value))
-        logging.debug('Max value: {}'.format(self._max_value))
-        logging.debug('Depth: {}'.format(self._depth))
+        logging.debug("File: {}".format(filename))
+        logging.debug("Photometric interpretation: {}".format(self._photometric_interpretation))
+        logging.debug("Min value: {}".format(self._min_value))
+        logging.debug("Max value: {}".format(self._max_value))
+        logging.debug("Depth: {}".format(self._depth))
 
         try:
             self._length = ds["NumberOfFrames"].value
@@ -53,38 +63,40 @@ def __iter__(self):
         for pixel_array in self._pixel_array:
             # Normalization to an output range 0..255, 0..65535
             pixel_array = pixel_array - self._min_value
-            pixel_array = pixel_array.astype(int) * (2 ** self._depth - 1)
+            pixel_array = pixel_array.astype(int) * (2**self._depth - 1)
             pixel_array = pixel_array // (self._max_value - self._min_value)
 
             # In some cases we need to convert colors additionally
-            if 'YBR' in self._photometric_interpretation:
-                pixel_array = convert_color_space(pixel_array, self._photometric_interpretation, 'RGB')
+            if "YBR" in self._photometric_interpretation:
+                pixel_array = convert_color_space(
+                    pixel_array, self._photometric_interpretation, "RGB"
+                )
 
             if self._depth == 8:
                 image = Image.fromarray(pixel_array.astype(np.uint8))
             elif self._depth == 16:
                 image = Image.fromarray(pixel_array.astype(np.uint16))
             else:
-                raise Exception('Not supported depth {}'.format(self._depth))
+                raise Exception("Not supported depth {}".format(self._depth))
 
             yield image
 
 
 def main(root_dir, output_root_dir):
-    dicom_files = glob(os.path.join(root_dir, '**', '*.dcm'), recursive = True)
+    dicom_files = glob(os.path.join(root_dir, "**", "*.dcm"), recursive=True)
     if not len(dicom_files):
-        logging.info('DICOM files are not found under the specified path')
+        logging.info("DICOM files are not found under the specified path")
     else:
-        logging.info('Number of found DICOM files: ' + str(len(dicom_files)))
+        logging.info("Number of found DICOM files: " + str(len(dicom_files)))
 
         pbar = tqdm(dicom_files)
         for input_filename in pbar:
-            pbar.set_description('Conversion: ' + input_filename)
+            pbar.set_description("Conversion: " + input_filename)
             input_basename = os.path.basename(input_filename)
 
             output_subpath = os.path.relpath(os.path.dirname(input_filename), root_dir)
             output_path = os.path.join(output_root_dir, output_subpath)
-            output_basename = '{}.png'.format(os.path.splitext(input_basename)[0])
+            output_basename = "{}.png".format(os.path.splitext(input_basename)[0])
             output_filename = os.path.join(output_path, output_basename)
 
             if not os.path.exists(output_path):
@@ -98,16 +110,19 @@ def main(root_dir, output_root_dir):
                         image.save(output_filename)
                     else:
                         filename_index = str(i).zfill(len(str(length)))
-                        list_output_filename = '{}_{}.png'.format(os.path.splitext(output_filename)[0], filename_index)
+                        list_output_filename = "{}_{}.png".format(
+                            os.path.splitext(output_filename)[0], filename_index
+                        )
                         image.save(list_output_filename)
             except Exception as ex:
-                logging.error('Error while processing ' + input_filename)
+                logging.error("Error while processing " + input_filename)
                 logging.error(ex)
 
-if __name__ == '__main__':
+
+if __name__ == "__main__":
     input_root_path = os.path.abspath(args.input.rstrip(os.sep))
     output_root_path = os.path.abspath(args.output.rstrip(os.sep))
 
-    logging.info('From: {}'.format(input_root_path))
-    logging.info('To: {}'.format(output_root_path))
+    logging.info("From: {}".format(input_root_path))
+    logging.info("To: {}".format(output_root_path))
 
     main(input_root_path, output_root_path)