dev: Add Ruff C4 rules #1072

Merged 1 commit on Dec 27, 2024
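This PR enables Ruff's flake8-comprehensions (C4) rule group and applies its autofixes across the codebase: redundant list/set/dict constructions and map(lambda ...) calls are rewritten into their idiomatic literal or comprehension forms. A minimal sketch of the patterns fixed below (illustrative examples only, not code from this diff):

    # C405: set() around a list literal becomes a set literal
    set([1, 2, 3])                      # before
    {1, 2, 3}                           # after

    # C417: map(lambda ...) becomes a comprehension
    list(map(lambda x: x + 1, xs))      # before
    [x + 1 for x in xs]                 # after

    # C419: any()/all() over a list comprehension becomes a generator
    any([f(x) for x in xs])             # before
    any(f(x) for x in xs)               # after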
2 changes: 1 addition & 1 deletion api/internal/commit/serializers.py
@@ -41,7 +41,7 @@ def get_report(self, commit: Commit):
         for filename in report.files:
             file_report = report.get(filename)
             file_totals = CommitTotalsSerializer(
-                {key: val for key, val in zip(TOTALS_MAP, file_report.totals)}
+                dict(zip(TOTALS_MAP, file_report.totals))
             )
             files.append(
                 {
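The change above is the C416-style fix (unnecessary comprehension): a dict comprehension that merely re-pairs the tuples produced by zip() is equivalent to calling dict() on the zip directly. A quick illustrative check with hypothetical values (TOTALS_MAP's real contents are defined elsewhere in the codebase):

    keys = ("files", "lines")    # hypothetical stand-in for TOTALS_MAP
    totals = (3, 120)            # hypothetical totals tuple
    assert {k: v for k, v in zip(keys, totals)} == dict(zip(keys, totals))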
2 changes: 1 addition & 1 deletion api/internal/tests/views/test_account_viewset.py
@@ -960,7 +960,7 @@ def test_update_team_plan_must_fail_if_too_many_activated_users_during_trial(sel
         self.current_owner.plan = PlanName.BASIC_PLAN_NAME.value
         self.current_owner.plan_user_count = 1
         self.current_owner.trial_status = TrialStatus.ONGOING.value
-        self.current_owner.plan_activated_users = [i for i in range(11)]
+        self.current_owner.plan_activated_users = list(range(11))
         self.current_owner.save()
 
         desired_plans = [
8 changes: 4 additions & 4 deletions codecov_auth/tests/test_admin.py
@@ -480,8 +480,8 @@ def test_stale_user_cleanup():
 
     # remove stale users with default > 90 days
     removed_users, affected_orgs = find_and_remove_stale_users(orgs)
-    assert removed_users == set([users[0].ownerid, users[2].ownerid, users[4].ownerid])
-    assert affected_orgs == set([orgs[0].ownerid, orgs[1].ownerid])
+    assert removed_users == {users[0].ownerid, users[2].ownerid, users[4].ownerid}
+    assert affected_orgs == {orgs[0].ownerid, orgs[1].ownerid}
 
     orgs = list(
         Owner.objects.filter(ownerid__in=[org.ownerid for org in orgs])
@@ -493,8 +493,8 @@ def test_stale_user_cleanup():
 
     # remove even more stale users
     removed_users, affected_orgs = find_and_remove_stale_users(orgs, timedelta(days=30))
-    assert removed_users == set([users[1].ownerid, users[3].ownerid])
-    assert affected_orgs == set([orgs[0].ownerid, orgs[1].ownerid])
+    assert removed_users == {users[1].ownerid, users[3].ownerid}
+    assert affected_orgs == {orgs[0].ownerid, orgs[1].ownerid}
 
     orgs = list(
         Owner.objects.filter(ownerid__in=[org.ownerid for org in orgs])
4 changes: 2 additions & 2 deletions codecov_auth/views/base.py
@@ -302,14 +302,14 @@ def _check_enterprise_organizations_membership(self, user_dict, orgs):
         """Checks if a user belongs to the restricted organizations (or teams if GitHub) allowed in settings."""
         if settings.IS_ENTERPRISE and get_config(self.service, "organizations"):
             orgs_in_settings = set(get_config(self.service, "organizations"))
-            orgs_in_user = set(org["username"] for org in orgs)
+            orgs_in_user = {org["username"] for org in orgs}
             if not (orgs_in_settings & orgs_in_user):
                 raise PermissionDenied(
                     "You must be a member of an organization listed in the Codecov Enterprise setup."
                 )
             if get_config(self.service, "teams") and "teams" in user_dict:
                 teams_in_settings = set(get_config(self.service, "teams"))
-                teams_in_user = set([team["name"] for team in user_dict["teams"]])
+                teams_in_user = {team["name"] for team in user_dict["teams"]}
                 if not (teams_in_settings & teams_in_user):
                     raise PermissionDenied(
                         "You must be a member of an allowed team in your organization."
2 changes: 1 addition & 1 deletion core/signals.py
@@ -20,7 +20,7 @@ def update_repository(
     changes: Dict[str, Any] = instance.tracker.changed()
     tracked_fields: List[str] = ["name", "upload_token", "activated", "active"]
 
-    if created or any([field in changes for field in tracked_fields]):
+    if created or any(field in changes for field in tracked_fields):
         data = {
             "type": "repo",
             "sync": "one",
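This is the C419 fix (unnecessary list comprehension inside any()/all()). It is not purely cosmetic: the list form evaluates every element before any() runs, while the generator form lets any() stop at the first truthy result. A small sketch with a hypothetical predicate (not from this PR):

    def tracked(field: str) -> bool:
        print(f"checking {field}")
        return field == "name"

    fields = ["name", "upload_token", "activated"]
    any([tracked(f) for f in fields])   # evaluates all three fields first
    any(tracked(f) for f in fields)     # prints only "checking name", then stops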
2 changes: 1 addition & 1 deletion graphql_api/dataloader/comparison.py
@@ -42,7 +42,7 @@ def batch_queryset(self, keys):
 
     async def batch_load_fn(self, keys):
         # flat list of all commits involved in all comparisons
-        commitids = set(commitid for key in keys for commitid in key)
+        commitids = {commitid for key in keys for commitid in key}
 
         commit_loader = CommitLoader.loader(self.info, self.repository_id)
         commits = await commit_loader.load_many(commitids)
4 changes: 2 additions & 2 deletions graphql_api/tests/test_owner_measurements.py
@@ -167,7 +167,7 @@ def test_repository_filtering_by_public_private(
         ]["measurements"]
         params = owner_coverage_measurements_with_fallback.call_args.args
         # Check that the call is using both private and public repos
-        assert set(params[1]) == set([self.repo1.pk, self.repo2.pk])
+        assert set(params[1]) == {self.repo1.pk, self.repo2.pk}
 
         query = f"""
             query Measurements {{
@@ -183,4 +183,4 @@ def test_repository_filtering_by_public_private(
         self.gql_request(query, owner=self.owner)["owner"]["measurements"]
         params = owner_coverage_measurements_with_fallback.call_args.args
         # Check that the call is using both private and public repos
-        assert set(params[1]) == set([self.repo1.pk, self.repo2.pk])
+        assert set(params[1]) == {self.repo1.pk, self.repo2.pk}
8 changes: 2 additions & 6 deletions graphql_api/types/bundle_analysis/base.py
@@ -307,9 +307,7 @@ def resolve_bundle_report_measurements(
 
     # All measureable names we need to fetch to compute the requested asset types
     if not asset_types:
-        measurables_to_fetch = [
-            item for item in list(BundleAnalysisMeasurementsAssetType)
-        ]
+        measurables_to_fetch = list(BundleAnalysisMeasurementsAssetType)
     elif ASSET_TYPE_UNKNOWN in asset_types:
         measurables_to_fetch = [
             BundleAnalysisMeasurementsAssetType.REPORT_SIZE,
@@ -332,9 +330,7 @@ def resolve_bundle_report_measurements(
 
     # All measureable name we need to return
     if not asset_types:
-        measurables_to_display = [
-            item for item in list(BundleAnalysisMeasurementsAssetType)
-        ]
+        measurables_to_display = list(BundleAnalysisMeasurementsAssetType)
     else:
         measurables_to_display = [
             BundleAnalysisMeasurementsAssetType[item]
12 changes: 5 additions & 7 deletions graphql_api/types/plan_representation/plan_representation.py
@@ -36,14 +36,12 @@ def resolve_base_unit_price(plan_data: PlanData, info) -> int:
 def resolve_benefits(plan_data: PlanData, info) -> List[str]:
     plan_service: PlanService = info.context["plan_service"]
     if plan_service.is_org_trialing:
-        benefits_with_pretrial_users = list(
-            map(
-                lambda benefit: benefit.replace(
-                    "Up to 1 user", f"Up to {plan_service.pretrial_users_count} users"
-                ),
-                plan_data["benefits"],
+        benefits_with_pretrial_users = [
+            benefit.replace(
+                "Up to 1 user", f"Up to {plan_service.pretrial_users_count} users"
             )
-        )
+            for benefit in plan_data["benefits"]
+        ]
         return benefits_with_pretrial_users
     return plan_data["benefits"]
 
3 changes: 2 additions & 1 deletion ruff.toml
@@ -39,6 +39,7 @@ target-version = "py312"
 # https://docs.astral.sh/ruff/rules/
 select = [
     "ASYNC", # flake8-async - async checks
+    "C4", # flake8-comprehensions - list/set/dict/generator comprehensions
     "E", # pycodestyle - error rules
     "F", # pyflakes - general Python errors, undefined names
     "I", # isort - import sorting
@@ -47,7 +48,7 @@ select = [
     "PLE", # pylint - error rules
     "W", # pycodestyle - warning rules
 ]
-ignore = ["F405", "F403", "E501", "E712"]
+ignore = ["F405", "F403", "E501", "E712", "C408"]
 
 # Allow fix for all enabled rules (when `--fix`) is provided.
 # The preferred method (for now) w.r.t. fixable rules is to manually update the makefile
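Note that C408 (unnecessary dict/list/tuple call) is added to the ignore list, so keyword-style constructions such as dict(a=1) remain allowed rather than being forced into {"a": 1} literals; the diff gives no rationale, so that reading is an assumption. With this config in place, the fixes in the rest of the PR can be reproduced with Ruff's standard autofix:

    ruff check --fix .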
2 changes: 1 addition & 1 deletion services/bundle_analysis.py
@@ -240,7 +240,7 @@
 
     @cached_property
     def module_extensions(self) -> List[str]:
-        return list(set([module.extension for module in self.modules]))
+        return list({module.extension for module in self.modules})
 
     @cached_property
     def routes(self) -> Optional[List[str]]:
Codecov Notifications (codecov/patch) check warning on services/bundle_analysis.py#L243: added line #L243 was not covered by tests.
8 changes: 4 additions & 4 deletions services/profiling.py
@@ -96,9 +96,9 @@ def _get_critical_files_from_yaml(
             file
             for file in report.files
             if any(
-                map(
-                    lambda regex_patt: regex.match(regex_patt, file, timeout=2),
-                    compiled_files_paths,
+                (
+                    regex.match(regex_patt, file, timeout=2)
+                    for regex_patt in compiled_files_paths
                 )
             )
         ]
@@ -133,4 +133,4 @@ def critical_files(self) -> List[CriticalFile]:
 
     @cached_property
     def critical_filenames(self) -> set[str]:
-        return set([file.name for file in self.critical_files])
+        return {file.name for file in self.critical_files}
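The first hunk above is the C417 fix (unnecessary map): the lambda passed to map() becomes a generator expression that any() can still short-circuit. Note the code uses the third-party regex package, whose match() accepts a timeout argument (the stdlib re module does not). A self-contained sketch of the same filtering pattern, with hypothetical paths and patterns:

    import regex  # third-party: pip install regex

    compiled_files_paths = [r"src/.*\.py", r"critical/.*"]
    files = ["src/app.py", "README.md", "critical/config.yml"]
    critical_files = [
        file
        for file in files
        if any(
            regex.match(patt, file, timeout=2)
            for patt in compiled_files_paths
        )
    ]
    assert critical_files == ["src/app.py", "critical/config.yml"]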
2 changes: 1 addition & 1 deletion timeseries/helpers.py
@@ -418,7 +418,7 @@ def owner_coverage_measurements_with_fallback(
     else:
         if settings.TIMESERIES_ENABLED:
             # we need to backfill some datasets
-            dataset_repo_ids = set(dataset.repository_id for dataset in datasets)
+            dataset_repo_ids = {dataset.repository_id for dataset in datasets}
             missing_dataset_repo_ids = set(repo_ids) - dataset_repo_ids
             created_datasets = Dataset.objects.bulk_create(
                 [
16 changes: 8 additions & 8 deletions upload/tests/views/test_upload_coverage.py
@@ -139,9 +139,9 @@ def test_upload_coverage_post(db, mocker):
     ).first()
     assert response.status_code == 201
     assert all(
-        map(
-            lambda x: x in response_json.keys(),
-            ["external_id", "created_at", "raw_upload_location", "url"],
+        (
+            x in response_json.keys()
+            for x in ["external_id", "created_at", "raw_upload_location", "url"]
         )
     )
     assert (
@@ -172,7 +172,7 @@
     assert UploadFlagMembership.objects.filter(
         report_session_id=upload.id, flag_id=flag2.id
     ).exists()
-    assert [flag for flag in upload.flags.all()] == [flag1, flag2]
+    assert list(upload.flags.all()) == [flag1, flag2]
 
     archive_service = ArchiveService(repository)
     assert upload.storage_path == MinioEndpoints.raw_with_upload_id.get_path(
@@ -242,9 +242,9 @@ def test_upload_coverage_post_shelter(db, mocker):
     ).first()
     assert response.status_code == 201
     assert all(
-        map(
-            lambda x: x in response_json.keys(),
-            ["external_id", "created_at", "raw_upload_location", "url"],
+        (
+            x in response_json.keys()
+            for x in ["external_id", "created_at", "raw_upload_location", "url"]
         )
     )
     assert (
@@ -275,7 +275,7 @@
     assert UploadFlagMembership.objects.filter(
         report_session_id=upload.id, flag_id=flag2.id
     ).exists()
-    assert [flag for flag in upload.flags.all()] == [flag1, flag2]
+    assert list(upload.flags.all()) == [flag1, flag2]
 
     assert upload.storage_path == "shelter/test/path.txt"
     presigned_put_mock.assert_called_with("archive", upload.storage_path, 10)
32 changes: 16 additions & 16 deletions upload/tests/views/test_uploads.py
@@ -243,9 +243,9 @@ def test_uploads_post(db, mocker, mock_redis):
     ).first()
     assert response.status_code == 201
     assert all(
-        map(
-            lambda x: x in response_json.keys(),
-            ["external_id", "created_at", "raw_upload_location", "url"],
+        (
+            x in response_json.keys()
+            for x in ["external_id", "created_at", "raw_upload_location", "url"]
         )
     )
     assert (
@@ -276,7 +276,7 @@
     assert UploadFlagMembership.objects.filter(
         report_session_id=upload.id, flag_id=flag2.id
     ).exists()
-    assert [flag for flag in upload.flags.all()] == [flag1, flag2]
+    assert list(upload.flags.all()) == [flag1, flag2]
 
     archive_service = ArchiveService(repository)
     assert upload.storage_path == MinioEndpoints.raw_with_upload_id.get_path(
@@ -357,9 +357,9 @@ def test_uploads_post_tokenless(db, mocker, mock_redis, private, branch, branch_
         state="started",
     ).first()
     assert all(
-        map(
-            lambda x: x in response_json.keys(),
-            ["external_id", "created_at", "raw_upload_location", "url"],
+        (
+            x in response_json.keys()
+            for x in ["external_id", "created_at", "raw_upload_location", "url"]
         )
     )
     assert (
@@ -390,7 +390,7 @@
     assert UploadFlagMembership.objects.filter(
         report_session_id=upload.id, flag_id=flag2.id
     ).exists()
-    assert [flag for flag in upload.flags.all()] == [flag1, flag2]
+    assert list(upload.flags.all()) == [flag1, flag2]
 
     archive_service = ArchiveService(repository)
     assert upload.storage_path == MinioEndpoints.raw_with_upload_id.get_path(
@@ -509,9 +509,9 @@ def test_uploads_post_token_required_auth_check(
         state="started",
     ).first()
     assert all(
-        map(
-            lambda x: x in response_json.keys(),
-            ["external_id", "created_at", "raw_upload_location", "url"],
+        (
+            x in response_json.keys()
+            for x in ["external_id", "created_at", "raw_upload_location", "url"]
         )
     )
     assert (
@@ -542,7 +542,7 @@
     assert UploadFlagMembership.objects.filter(
         report_session_id=upload.id, flag_id=flag2.id
     ).exists()
-    assert [flag for flag in upload.flags.all()] == [flag1, flag2]
+    assert list(upload.flags.all()) == [flag1, flag2]
 
     archive_service = ArchiveService(repository)
     assert upload.storage_path == MinioEndpoints.raw_with_upload_id.get_path(
@@ -643,9 +643,9 @@ def test_uploads_post_github_oidc_auth(
         state="started",
     ).first()
     assert all(
-        map(
-            lambda x: x in response_json.keys(),
-            ["external_id", "created_at", "raw_upload_location", "url"],
+        (
+            x in response_json.keys()
+            for x in ["external_id", "created_at", "raw_upload_location", "url"]
        )
     )
     assert (
@@ -676,7 +676,7 @@
     assert UploadFlagMembership.objects.filter(
         report_session_id=upload.id, flag_id=flag2.id
     ).exists()
-    assert [flag for flag in upload.flags.all()] == [flag1, flag2]
+    assert list(upload.flags.all()) == [flag1, flag2]
 
     archive_service = ArchiveService(repository)
     assert upload.storage_path == MinioEndpoints.raw_with_upload_id.get_path(
6 changes: 3 additions & 3 deletions upload/views/empty_upload.py
@@ -149,9 +149,9 @@ def post(self, request: HttpRequest, *args: Any, **kwargs: Any) -> Response:
             file
             for file in changed_files
             if any(
-                map(
-                    lambda regex_patt: regex.match(regex_patt, file, timeout=2),
-                    compiled_files_to_ignore,
+                (
+                    regex.match(regex_patt, file, timeout=2)
+                    for regex_patt in compiled_files_to_ignore
                 )
             )
         ]
24 changes: 12 additions & 12 deletions webhook_handlers/views/github.py
@@ -433,12 +433,12 @@ def _handle_installation_repository_events(self, request, *args, **kwargs):
             ghapp_installation.repository_service_ids = None
         else:
             repo_list_to_save = set(ghapp_installation.repository_service_ids or [])
-            repositories_added_service_ids = set(
-                map(lambda obj: obj["id"], request.data.get("repositories_added", []))
-            )
-            repositories_removed_service_ids = set(
-                map(lambda obj: obj["id"], request.data.get("repositories_removed", []))
-            )
+            repositories_added_service_ids = {
+                obj["id"] for obj in request.data.get("repositories_added", [])
+            }
+            repositories_removed_service_ids = {
+                obj["id"] for obj in request.data.get("repositories_removed", [])
+            }
             repo_list_to_save = repo_list_to_save.union(
                 repositories_added_service_ids
             ).difference(repositories_removed_service_ids)
@@ -501,9 +501,9 @@ def _handle_installation_events(
         if affects_all_repositories:
             ghapp_installation.repository_service_ids = None
         else:
-            repositories_service_ids = list(
-                map(lambda obj: obj["id"], request.data.get("repositories", []))
-            )
+            repositories_service_ids = [
+                obj["id"] for obj in request.data.get("repositories", [])
+            ]
             ghapp_installation.repository_service_ids = repositories_service_ids
 
         if action in ["suspend", "unsuspend"]:
@@ -538,9 +538,9 @@
             + request.data.get("repositories_added", [])
             + request.data.get("repositories_removed", [])
         )
-        repos_affected_clean = set(
-            map(lambda obj: (obj["id"], obj["node_id"]), repos_affected)
-        )
+        repos_affected_clean = {
+            (obj["id"], obj["node_id"]) for obj in repos_affected
+        }
 
         TaskService().refresh(
             ownerid=owner.ownerid,