From 4b324380ecad58e8f7fc9d311ee095f5f9266f17 Mon Sep 17 00:00:00 2001
From: daniel-codecov <159859649+daniel-codecov@users.noreply.github.com>
Date: Wed, 1 May 2024 18:28:37 -0400
Subject: [PATCH] chore: update feature interface (#421)

* update interface

* update more interfaces

* update shared

* fix missing test

* update shared
---
 conftest.py                             |  2 +-
 requirements.in                         |  2 +-
 requirements.txt                        |  6 ++----
 services/report/__init__.py             |  6 +++---
 services/report/raw_upload_processor.py |  4 ++--
 tasks/sync_repos.py                     |  4 ++--
 tasks/test_results_finisher.py          |  4 ++--
 tasks/upload.py                         |  4 ++--
 tasks/upload_finisher.py                |  2 +-
 tasks/upload_processor.py               | 10 +++++-----
 10 files changed, 21 insertions(+), 23 deletions(-)

diff --git a/conftest.py b/conftest.py
index def4c0370..893284cae 100644
--- a/conftest.py
+++ b/conftest.py
@@ -325,7 +325,7 @@ def mock_feature(mocker, request):
 
     from shared.rollouts import Feature
 
-    def check_value(self, *, owner_id=None, repo_id=None, default=False):
+    def check_value(self, identifier, default=False):
         return default
 
     return mocker.patch.object(Feature, "check_value", check_value)
diff --git a/requirements.in b/requirements.in
index 8bb64f173..4850c9ee2 100644
--- a/requirements.in
+++ b/requirements.in
@@ -1,4 +1,4 @@
-https://github.com/codecov/shared/archive/f8e95d52d6182d4fc4d5492d5eea50e045a8b51c.tar.gz#egg=shared
+https://github.com/codecov/shared/archive/148b7ae3a6d4cdfc554ba9ca8b911c13e82d77b8.tar.gz#egg=shared
 https://github.com/codecov/opentelem-python/archive/refs/tags/v0.0.4a1.tar.gz#egg=codecovopentelem
 https://github.com/codecov/test-results-parser/archive/5515e960d5d38881036e9127f86320efca649f13.tar.gz#egg=test-results-parser
 boto3>=1.34
diff --git a/requirements.txt b/requirements.txt
index 07d484f68..ada54deaf 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -361,9 +361,7 @@ requests==2.31.0
 respx==0.20.2
     # via -r requirements.in
 rfc3986[idna2008]==1.4.0
-    # via
-    #   httpx
-    #   rfc3986
+    # via httpx
 rsa==4.7.2
     # via google-auth
 s3transfer==0.10.1
@@ -374,7 +372,7 @@ sentry-sdk==1.40.0
     # via
     #   -r requirements.in
     #   shared
-shared @ https://github.com/codecov/shared/archive/f8e95d52d6182d4fc4d5492d5eea50e045a8b51c.tar.gz
+shared @ https://github.com/codecov/shared/archive/c0901f806b0af8c7d0ce19bbb78d5f5a541753d3.tar.gz
     # via -r requirements.in
 six==1.16.0
     # via
diff --git a/services/report/__init__.py b/services/report/__init__.py
index e97659c2f..267796798 100644
--- a/services/report/__init__.py
+++ b/services/report/__init__.py
@@ -295,7 +295,7 @@ async def initialize_and_save_report(
         # finisher can build off of it later. Makes the assumption that the CFFs occupy the first
         # j to i session ids where i is the max id of the CFFs and j is some integer less than i.
         if await PARALLEL_UPLOAD_PROCESSING_BY_REPO.check_value_async(
-            repo_id=commit.repository.repoid
+            identifier=commit.repository.repoid
         ):
             await self.save_parallel_report_to_archive(
                 commit, report, report_code
@@ -755,7 +755,7 @@ async def create_new_report_for_commit(self, commit: Commit) -> Report:
         )
         max_parenthood_deepness = (
             await CARRYFORWARD_BASE_SEARCH_RANGE_BY_OWNER.check_value_async(
-                owner_id=repo.ownerid, default=10
+                identifier=repo.ownerid, default=10
             )
         )
 
@@ -856,7 +856,7 @@ def parse_raw_report_from_storage(
         # so that the parallel pipeline can use those to parse. The serial pipeline rewrites the raw uploaded
         # reports to a human readable version that doesn't include file fixes, so that's why copying is necessary.
         if PARALLEL_UPLOAD_PROCESSING_BY_REPO.check_value(
-            repo_id=repo.repoid, default=False
+            identifier=repo.repoid, default=False
         ) and (not is_error_case):
             parallel_url = archive_url.removesuffix(".txt") + "_PARALLEL.txt"
             log.info(
diff --git a/services/report/raw_upload_processor.py b/services/report/raw_upload_processor.py
index ae69c11fc..08880ea36 100644
--- a/services/report/raw_upload_processor.py
+++ b/services/report/raw_upload_processor.py
@@ -114,7 +114,7 @@ def process_raw_upload(
     should_use_encoded_labels = (
         upload
         and USE_LABEL_INDEX_IN_REPORT_PROCESSING_BY_REPO_ID.check_value(
-            repo_id=upload.report.commit.repository.repoid, default=False
+            identifier=upload.report.commit.repository.repoid, default=False
         )
     )
     # [javascript] check for both coverage.json and coverage/coverage.lcov
@@ -357,7 +357,7 @@ def _adjust_sessions(
     if (
         upload
         and USE_LABEL_INDEX_IN_REPORT_PROCESSING_BY_REPO_ID.check_value(
-            repo_id=upload.report.commit.repository.repoid, default=False
+            identifier=upload.report.commit.repository.repoid, default=False
         )
         and to_partially_overwrite_flags
     ):
diff --git a/tasks/sync_repos.py b/tasks/sync_repos.py
index 41a17ccf4..d4e0bab52 100644
--- a/tasks/sync_repos.py
+++ b/tasks/sync_repos.py
@@ -323,7 +323,7 @@ def process_repos(repos):
         # Instead of relying exclusively on the webhooks to do that
         # TODO: Maybe we don't need to run this every time, but once in a while just in case...
         if await LIST_REPOS_GENERATOR_BY_OWNER_ID.check_value_async(
-            owner_id=ownerid, default=False
+            identifier=ownerid, default=False
         ):
             with metrics.timer(
                 f"{metrics_scope}.sync_repos_using_integration.list_repos_generator"
@@ -436,7 +436,7 @@ def process_repos(repos):
 
         try:
             if await LIST_REPOS_GENERATOR_BY_OWNER_ID.check_value_async(
-                owner_id=ownerid, default=False
+                identifier=ownerid, default=False
             ):
                 with metrics.timer(f"{metrics_scope}.sync_repos.list_repos_generator"):
                     async for page in git.list_repos_generator():
diff --git a/tasks/test_results_finisher.py b/tasks/test_results_finisher.py
index 538515d8b..198ea7bb2 100644
--- a/tasks/test_results_finisher.py
+++ b/tasks/test_results_finisher.py
@@ -232,7 +232,7 @@ def process_impl_within_lock(
                 tags={"status": "success", "reason": "tests_failed"},
             )
         flaky_tests = None
-        if FLAKY_TEST_DETECTION.check_value(repo_id=repoid):
+        if FLAKY_TEST_DETECTION.check_value(identifier=repoid):
             flaky_tests = dict()
 
         notifier = TestResultsNotifier(commit, commit_yaml)
@@ -262,7 +262,7 @@ def process_impl_within_lock(
                 tags={"status": success, "reason": reason},
             )
 
-        if FLAKY_TEST_DETECTION.check_value(repo_id=repoid):
+        if FLAKY_TEST_DETECTION.check_value(identifier=repoid):
             log.info(
                 "Running flaky test detection",
                 extra=dict(
diff --git a/tasks/upload.py b/tasks/upload.py
index 84ce43167..216aeb61c 100644
--- a/tasks/upload.py
+++ b/tasks/upload.py
@@ -675,7 +675,7 @@ def _schedule_coverage_processing_task(
             processing_tasks.append(sig)
 
         if PARALLEL_UPLOAD_PROCESSING_BY_REPO.check_value(
-            repo_id=commit.repository.repoid
+            identifier=commit.repository.repoid
         ):
             parallel_chunk_size = 1
             num_sessions = len(argument_list)
@@ -776,7 +776,7 @@ def _schedule_coverage_processing_task(
         serial_tasks = chain(*processing_tasks)
 
         if PARALLEL_UPLOAD_PROCESSING_BY_REPO.check_value(
-            repo_id=commit.repository.repoid
+            identifier=commit.repository.repoid
         ):
             parallel_tasks = chord(parallel_processing_tasks, finish_parallel_sig)
             parallel_shadow_experiment = serial_tasks | parallel_tasks
diff --git a/tasks/upload_finisher.py b/tasks/upload_finisher.py
index 5f992cdc8..2aa1134e5 100644
--- a/tasks/upload_finisher.py
+++ b/tasks/upload_finisher.py
@@ -93,7 +93,7 @@ def run_impl(
         repository = commit.repository
 
         if (
-            PARALLEL_UPLOAD_PROCESSING_BY_REPO.check_value(repo_id=repository.repoid)
+            PARALLEL_UPLOAD_PROCESSING_BY_REPO.check_value(identifier=repository.repoid)
             and in_parallel
         ):
             actual_processing_results = {
diff --git a/tasks/upload_processor.py b/tasks/upload_processor.py
index 213c9f98f..997e254f0 100644
--- a/tasks/upload_processor.py
+++ b/tasks/upload_processor.py
@@ -97,7 +97,7 @@ def run_impl(
         )
 
         if (
-            PARALLEL_UPLOAD_PROCESSING_BY_REPO.check_value(repo_id=repoid)
+            PARALLEL_UPLOAD_PROCESSING_BY_REPO.check_value(identifier=repoid)
             and in_parallel
         ):
             log.info(
@@ -190,7 +190,7 @@ def process_impl_within_lock(
     **kwargs,
 ):
     if (
-        not PARALLEL_UPLOAD_PROCESSING_BY_REPO.check_value(repo_id=repoid)
+        not PARALLEL_UPLOAD_PROCESSING_BY_REPO.check_value(identifier=repoid)
        and in_parallel
    ):
        log.info(
@@ -219,7 +219,7 @@ def process_impl_within_lock(
     report_service = ReportService(commit_yaml)
 
     if (
-        PARALLEL_UPLOAD_PROCESSING_BY_REPO.check_value(repo_id=repository.repoid)
+        PARALLEL_UPLOAD_PROCESSING_BY_REPO.check_value(identifier=repository.repoid)
         and in_parallel
     ):
         log.info(
@@ -315,7 +315,7 @@ def process_impl_within_lock(
         results_dict = {}
         if (
             PARALLEL_UPLOAD_PROCESSING_BY_REPO.check_value(
-                repo_id=repository.repoid
+                identifier=repository.repoid
             )
             and in_parallel
         ):
@@ -395,7 +395,7 @@ def process_impl_within_lock(
 
         if (
             PARALLEL_UPLOAD_PROCESSING_BY_REPO.check_value(
-                repo_id=repository.repoid
+                identifier=repository.repoid
             )
             and in_parallel
         ):
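
The entire patch is one mechanical substitution: every Feature call site swaps
a key-specific keyword (owner_id= / repo_id=) for the single generic
identifier parameter, and the test stub in conftest.py adopts the matching
signature. A minimal sketch of the before/after call shapes follows; it is
not part of the patch. The Feature class, check_value, and check_value_async
come from the diff itself, while `feature` and `repo` are hypothetical
stand-ins for any flag object and any model carrying repoid/ownerid.

    from shared.rollouts import Feature

    async def example(feature: Feature, repo):
        # Old interface (removed by this patch): the keyword had to match
        # how the flag was keyed:
        #     feature.check_value(repo_id=repo.repoid, default=False)
        #     await feature.check_value_async(owner_id=repo.ownerid, default=10)
        # New interface: one generic identifier, whatever the flag is keyed by.
        enabled = feature.check_value(identifier=repo.repoid, default=False)
        max_depth = await feature.check_value_async(identifier=repo.ownerid, default=10)
        return enabled, max_depth

    # Test stub with the matching signature (mirrors the conftest.py hunk):
    # always fall back to the caller-supplied default.
    def check_value(self, identifier, default=False):
        return default
    # patched in tests via: mocker.patch.object(Feature, "check_value", check_value)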