chore: update feature interface (#421)
* update interface

* update more interfaces

* update shared

* fix missing test

* update shared
daniel-codecov authored May 1, 2024
1 parent f64dc25 commit 4b32438
Showing 10 changed files with 21 additions and 23 deletions.
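
In short, this commit collapses the entity-specific keyword arguments of shared.rollouts.Feature.check_value (owner_id=..., repo_id=...) into a single identifier parameter, and check_value_async follows the same shape. A minimal before/after sketch of a call site; the flag name and Feature constructor arguments below are illustrative assumptions, not taken from this diff:

    from shared.rollouts import Feature

    # Hypothetical flag for illustration; real flags such as
    # PARALLEL_UPLOAD_PROCESSING_BY_REPO are defined in the worker's rollout
    # modules, and the constructor arguments here are assumed.
    SOME_FLAG = Feature("some_flag")

    repoid = 1234  # stand-in for commit.repository.repoid

    # Before this commit: entity-specific keyword arguments.
    # enabled = SOME_FLAG.check_value(repo_id=repoid, default=False)

    # After this commit: one generic identifier, whatever the entity type.
    enabled = SOME_FLAG.check_value(identifier=repoid, default=False)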
2 changes: 1 addition & 1 deletion conftest.py
@@ -325,7 +325,7 @@ def mock_feature(mocker, request):
 
     from shared.rollouts import Feature
 
-    def check_value(self, *, owner_id=None, repo_id=None, default=False):
+    def check_value(self, identifier, default=False):
         return default
 
     return mocker.patch.object(Feature, "check_value", check_value)
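
The patched check_value above short-circuits every feature flag to its default, so tests run as if no rollout is active. A hypothetical test using this fixture (assuming pytest-mock supplies mocker, as the fixture's signature implies):

    def test_flags_fall_back_to_default(mock_feature):
        from shared.rollouts import Feature

        # Bypass the real constructor; the patched check_value ignores instance state.
        flag = Feature.__new__(Feature)
        assert flag.check_value(identifier=1234) is False  # default=False applies
        assert flag.check_value(identifier=1234, default=True) is True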
2 changes: 1 addition & 1 deletion requirements.in
@@ -1,4 +1,4 @@
-https://github.com/codecov/shared/archive/f8e95d52d6182d4fc4d5492d5eea50e045a8b51c.tar.gz#egg=shared
+https://github.com/codecov/shared/archive/148b7ae3a6d4cdfc554ba9ca8b911c13e82d77b8.tar.gz#egg=shared
 https://github.com/codecov/opentelem-python/archive/refs/tags/v0.0.4a1.tar.gz#egg=codecovopentelem
 https://github.com/codecov/test-results-parser/archive/5515e960d5d38881036e9127f86320efca649f13.tar.gz#egg=test-results-parser
 boto3>=1.34
6 changes: 2 additions & 4 deletions requirements.txt
@@ -361,9 +361,7 @@ requests==2.31.0
 respx==0.20.2
     # via -r requirements.in
 rfc3986[idna2008]==1.4.0
-    # via
-    #   httpx
-    #   rfc3986
+    # via httpx
 rsa==4.7.2
     # via google-auth
 s3transfer==0.10.1
@@ -374,7 +372,7 @@ sentry-sdk==1.40.0
     # via
     #   -r requirements.in
     #   shared
-shared @ https://github.com/codecov/shared/archive/f8e95d52d6182d4fc4d5492d5eea50e045a8b51c.tar.gz
+shared @ https://github.com/codecov/shared/archive/c0901f806b0af8c7d0ce19bbb78d5f5a541753d3.tar.gz
     # via -r requirements.in
 six==1.16.0
     # via
6 changes: 3 additions & 3 deletions services/report/__init__.py
@@ -295,7 +295,7 @@ async def initialize_and_save_report(
         # finisher can build off of it later. Makes the assumption that the CFFs occupy the first
         # j to i session ids where i is the max id of the CFFs and j is some integer less than i.
         if await PARALLEL_UPLOAD_PROCESSING_BY_REPO.check_value_async(
-            repo_id=commit.repository.repoid
+            identifier=commit.repository.repoid
         ):
             await self.save_parallel_report_to_archive(
                 commit, report, report_code
@@ -755,7 +755,7 @@ async def create_new_report_for_commit(self, commit: Commit) -> Report:
         )
         max_parenthood_deepness = (
             await CARRYFORWARD_BASE_SEARCH_RANGE_BY_OWNER.check_value_async(
-                owner_id=repo.ownerid, default=10
+                identifier=repo.ownerid, default=10
             )
         )
 
@@ -856,7 +856,7 @@ def parse_raw_report_from_storage(
         # so that the parallel pipeline can use those to parse. The serial pipeline rewrites the raw uploaded
         # reports to a human readable version that doesn't include file fixes, so that's why copying is necessary.
         if PARALLEL_UPLOAD_PROCESSING_BY_REPO.check_value(
-            repo_id=repo.repoid, default=False
+            identifier=repo.repoid, default=False
         ) and (not is_error_case):
             parallel_url = archive_url.removesuffix(".txt") + "_PARALLEL.txt"
             log.info(
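This file uses both halves of the interface: check_value in synchronous code and check_value_async in coroutines, which, per the hunks above, takes the same identifier/default arguments. A minimal sketch of the async call, written to take the flag as a parameter so nothing about the Feature constructor is assumed:

    import asyncio

    from shared.rollouts import Feature

    async def parallel_processing_enabled(flag: Feature, repoid: int) -> bool:
        # Same identifier/default interface as check_value, awaited.
        return await flag.check_value_async(identifier=repoid, default=False)

    # Hypothetical usage:
    # asyncio.run(parallel_processing_enabled(PARALLEL_UPLOAD_PROCESSING_BY_REPO, 1234))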
4 changes: 2 additions & 2 deletions services/report/raw_upload_processor.py
@@ -114,7 +114,7 @@ def process_raw_upload(
     should_use_encoded_labels = (
         upload
         and USE_LABEL_INDEX_IN_REPORT_PROCESSING_BY_REPO_ID.check_value(
-            repo_id=upload.report.commit.repository.repoid, default=False
+            identifier=upload.report.commit.repository.repoid, default=False
         )
     )
     # [javascript] check for both coverage.json and coverage/coverage.lcov
@@ -357,7 +357,7 @@ def _adjust_sessions(
     if (
         upload
         and USE_LABEL_INDEX_IN_REPORT_PROCESSING_BY_REPO_ID.check_value(
-            repo_id=upload.report.commit.repository.repoid, default=False
+            identifier=upload.report.commit.repository.repoid, default=False
         )
         and to_partially_overwrite_flags
     ):
4 changes: 2 additions & 2 deletions tasks/sync_repos.py
@@ -323,7 +323,7 @@ def process_repos(repos):
         # Instead of relying exclusively on the webhooks to do that
         # TODO: Maybe we don't need to run this every time, but once in a while just in case...
         if await LIST_REPOS_GENERATOR_BY_OWNER_ID.check_value_async(
-            owner_id=ownerid, default=False
+            identifier=ownerid, default=False
         ):
             with metrics.timer(
                 f"{metrics_scope}.sync_repos_using_integration.list_repos_generator"
@@ -436,7 +436,7 @@ def process_repos(repos):
 
         try:
             if await LIST_REPOS_GENERATOR_BY_OWNER_ID.check_value_async(
-                owner_id=ownerid, default=False
+                identifier=ownerid, default=False
             ):
                 with metrics.timer(f"{metrics_scope}.sync_repos.list_repos_generator"):
                     async for page in git.list_repos_generator():
4 changes: 2 additions & 2 deletions tasks/test_results_finisher.py
@@ -232,7 +232,7 @@ def process_impl_within_lock(
             tags={"status": "success", "reason": "tests_failed"},
         )
         flaky_tests = None
-        if FLAKY_TEST_DETECTION.check_value(repo_id=repoid):
+        if FLAKY_TEST_DETECTION.check_value(identifier=repoid):
             flaky_tests = dict()
 
         notifier = TestResultsNotifier(commit, commit_yaml)
@@ -262,7 +262,7 @@ def process_impl_within_lock(
             tags={"status": success, "reason": reason},
         )
 
-        if FLAKY_TEST_DETECTION.check_value(repo_id=repoid):
+        if FLAKY_TEST_DETECTION.check_value(identifier=repoid):
             log.info(
                 "Running flaky test detection",
                 extra=dict(
4 changes: 2 additions & 2 deletions tasks/upload.py
@@ -675,7 +675,7 @@ def _schedule_coverage_processing_task(
             processing_tasks.append(sig)
 
         if PARALLEL_UPLOAD_PROCESSING_BY_REPO.check_value(
-            repo_id=commit.repository.repoid
+            identifier=commit.repository.repoid
         ):
             parallel_chunk_size = 1
             num_sessions = len(argument_list)
@@ -776,7 +776,7 @@ def _schedule_coverage_processing_task(
         serial_tasks = chain(*processing_tasks)
 
         if PARALLEL_UPLOAD_PROCESSING_BY_REPO.check_value(
-            repo_id=commit.repository.repoid
+            identifier=commit.repository.repoid
         ):
             parallel_tasks = chord(parallel_processing_tasks, finish_parallel_sig)
             parallel_shadow_experiment = serial_tasks | parallel_tasks
2 changes: 1 addition & 1 deletion tasks/upload_finisher.py
@@ -93,7 +93,7 @@ def run_impl(
         repository = commit.repository
 
         if (
-            PARALLEL_UPLOAD_PROCESSING_BY_REPO.check_value(repo_id=repository.repoid)
+            PARALLEL_UPLOAD_PROCESSING_BY_REPO.check_value(identifier=repository.repoid)
             and in_parallel
         ):
             actual_processing_results = {
10 changes: 5 additions & 5 deletions tasks/upload_processor.py
@@ -97,7 +97,7 @@ def run_impl(
         )
 
         if (
-            PARALLEL_UPLOAD_PROCESSING_BY_REPO.check_value(repo_id=repoid)
+            PARALLEL_UPLOAD_PROCESSING_BY_REPO.check_value(identifier=repoid)
             and in_parallel
         ):
             log.info(
@@ -190,7 +190,7 @@ def process_impl_within_lock(
     **kwargs,
 ):
     if (
-        not PARALLEL_UPLOAD_PROCESSING_BY_REPO.check_value(repo_id=repoid)
+        not PARALLEL_UPLOAD_PROCESSING_BY_REPO.check_value(identifier=repoid)
         and in_parallel
     ):
         log.info(
@@ -219,7 +219,7 @@ def process_impl_within_lock(
     report_service = ReportService(commit_yaml)
 
     if (
-        PARALLEL_UPLOAD_PROCESSING_BY_REPO.check_value(repo_id=repository.repoid)
+        PARALLEL_UPLOAD_PROCESSING_BY_REPO.check_value(identifier=repository.repoid)
         and in_parallel
     ):
         log.info(
@@ -315,7 +315,7 @@ def process_impl_within_lock(
         results_dict = {}
         if (
             PARALLEL_UPLOAD_PROCESSING_BY_REPO.check_value(
-                repo_id=repository.repoid
+                identifier=repository.repoid
             )
             and in_parallel
         ):
@@ -395,7 +395,7 @@ def process_impl_within_lock(
 
         if (
             PARALLEL_UPLOAD_PROCESSING_BY_REPO.check_value(
-                repo_id=repository.repoid
+                identifier=repository.repoid
             )
             and in_parallel
         ):
