add new user info to metrics Readme (apache#30989)
* add new user info to metrics Readme

* add grant call for kubeproxyuser_ro
volatilemolotov authored Apr 17, 2024
1 parent d6eb576 commit e3077b7
Showing 2 changed files with 33 additions and 8 deletions.
7 changes: 7 additions & 0 deletions .test-infra/metrics/README.md
@@ -115,6 +115,13 @@ Kubernetes deployment instructions are maintained in the wiki:
* [Community metrics](https://cwiki.apache.org/confluence/display/BEAM/Community+Metrics)
* [Test Results Monitoring](https://cwiki.apache.org/confluence/display/BEAM/Test+Results+Monitoring)

### PSQL User
Grafana running in the metrics cluster is configured to use the `kubeproxyuser_ro` user, which has no permissions by default. SELECT permission was manually granted on all existing tables in the `beammetrics` PostgreSQL database. Any new table, including tables created or recreated by automation, must also grant SELECT to this user:

```
GRANT SELECT ON table_name TO kubeproxyuser_ro;
```
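
If grants are needed for every existing table at once, PostgreSQL can also apply them in bulk; a sketch, assuming the tables live in the default `public` schema (note that `ALTER DEFAULT PRIVILEGES` only covers tables later created by the role that runs it):

```
GRANT SELECT ON ALL TABLES IN SCHEMA public TO kubeproxyuser_ro;
ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT SELECT ON TABLES TO kubeproxyuser_ro;
```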


### Note: Basic Auth is not supported on BEAM Clusters as of March 4th 2022

@@ -53,7 +53,9 @@
 
 
 class Workflow:
-    def __init__(self, id, name, filename, url, category=None, threshold=0.5, is_flaky=False):
+    def __init__(
+        self, id, name, filename, url, category=None, threshold=0.5, is_flaky=False
+    ):
         self.id = id
         self.name = name
         self.filename = filename
@@ -78,7 +80,9 @@ def clone_git_beam_repo(dest_path):
     if not os.path.exists(filesystem_path):
         os.mkdir(filesystem_path)
     os.chdir(filesystem_path)
-    os.system(f"git clone --filter=blob:none --sparse https://github.com/{GIT_ORG}/beam")
+    os.system(
+        f"git clone --filter=blob:none --sparse https://github.com/{GIT_ORG}/beam"
+    )
     os.chdir("beam")
     os.system("git sparse-checkout init --cone")
     os.system(f"git sparse-checkout set {dest_path}")
@@ -165,7 +169,9 @@ def filter_workflow_runs(run, issue):
         response = await fetch(url, semaphore, params, headers)
         if len(response):
             print(f"Found a recently closed issue for the {workflow.name} workflow")
-            workflow_runs = [run for run in workflow_runs if filter_workflow_runs(run, response[0])]
+            workflow_runs = [
+                run for run in workflow_runs if filter_workflow_runs(run, response[0])
+            ]
 
     print(f"Number of workflow runs to consider: {len(workflow_runs)}")
     if len(workflow_runs) < 3:
@@ -176,7 +182,7 @@ def filter_workflow_runs(run, issue):
     if len(workflow_runs):
         failed_runs = list(filter(lambda r: r.status == "failure", workflow_runs))
         print(f"Number of failed workflow runs: {len(failed_runs)}")
-        success_rate -= (len(failed_runs) / len(workflow_runs))
+        success_rate -= len(failed_runs) / len(workflow_runs)
 
     print(f"Success rate: {success_rate}")
     return True if success_rate < workflow.threshold else False
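
A quick worked example of the flakiness check in the hunk above, assuming `success_rate` starts at 1.0 (its initialization sits outside this hunk) and the default `threshold=0.5` from `Workflow.__init__`:

```
# Hypothetical data: 10 recent runs, 3 of which failed.
statuses = ["success"] * 7 + ["failure"] * 3
success_rate = 1.0 - statuses.count("failure") / len(statuses)  # 0.7
print(success_rate < 0.5)  # False: the workflow is not marked flaky
```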
@@ -287,7 +293,9 @@ def append_workflow_runs(workflow, runs):
                 datetime.strptime(run["run_started_at"], "%Y-%m-%dT%H:%M:%SZ"),
             )
             if workflow_runs.get(workflow_run.id):
-                print(f"Duplicate run for {workflow.id} workflow: {workflow_run.id}")
+                print(
+                    f"Duplicate run for {workflow.id} workflow: {workflow_run.id}"
+                )
             workflow_runs[workflow_run.id] = workflow_run
         workflow.runs.extend(workflow_runs.values())

@@ -397,7 +405,9 @@ def append_workflow_runs(workflow, runs):
                         "None",
                         "None",
                         workflow.id,
-                        datetime.strptime("0001-01-01T00:00:00Z", "%Y-%m-%dT%H:%M:%SZ"),
+                        datetime.strptime(
+                            "0001-01-01T00:00:00Z", "%Y-%m-%dT%H:%M:%SZ"
+                        ),
                     )
                 )
         if len(workflow.runs) >= int(GH_NUMBER_OF_WORKFLOW_RUNS_TO_FETCH):
@@ -408,7 +418,7 @@
 
     for workflow in list(workflows.values()):
         runs = sorted(workflow.runs, key=lambda r: r.started_at, reverse=True)
-        workflow.runs = runs[:int(GH_NUMBER_OF_WORKFLOW_RUNS_TO_FETCH)]
+        workflow.runs = runs[: int(GH_NUMBER_OF_WORKFLOW_RUNS_TO_FETCH)]
 
     return list(workflows.values())

@@ -444,6 +454,12 @@ def save_workflows(workflows):
         CONSTRAINT fk_workflow FOREIGN KEY(workflow_id) REFERENCES {workflows_table_name}(workflow_id))\n"""
     cursor.execute(create_workflows_table_query)
     cursor.execute(create_workflow_runs_table_query)
+    grant_workflows_query = f"""
+    GRANT SELECT ON github_workflows TO kubeproxyuser_ro;"""
+    grant_workflow_runs_query = f"""
+    GRANT SELECT ON github_workflow_runs TO kubeproxyuser_ro;"""
+    cursor.execute(grant_workflows_query)
+    cursor.execute(grant_workflow_runs_query)
     insert_workflows_query = f"""
     INSERT INTO {workflows_table_name} (workflow_id, name, filename, url, dashboard_category, threshold, is_flaky, retrieved_at)
     VALUES %s"""
@@ -467,7 +483,9 @@ def save_workflows(workflows):
             )
         )
         for idx, run in enumerate(workflow.runs):
-            insert_workflow_runs.append((run.id, idx+1, run.status, run.url, run.workflow_id, run.started_at))
+            insert_workflow_runs.append(
+                (run.id, idx + 1, run.status, run.url, run.workflow_id, run.started_at)
+            )
     psycopg2.extras.execute_values(cursor, insert_workflows_query, insert_workflows)
     psycopg2.extras.execute_values(
         cursor, insert_workflow_runs_query, insert_workflow_runs
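
For context, the grant step this commit adds to `save_workflows` boils down to two statements executed on the same cursor that creates the tables. A minimal, self-contained sketch of that pattern, assuming hypothetical local connection parameters (the deployed job gets its real credentials from the environment):

```
import psycopg2

# Hypothetical connection settings for local experimentation only.
conn = psycopg2.connect(
    host="localhost", dbname="beammetrics", user="postgres", password="postgres"
)
with conn, conn.cursor() as cursor:
    # Re-applying GRANT right after table creation keeps the read-only
    # Grafana user working even when automation recreates the tables.
    cursor.execute("GRANT SELECT ON github_workflows TO kubeproxyuser_ro;")
    cursor.execute("GRANT SELECT ON github_workflow_runs TO kubeproxyuser_ro;")
conn.close()
```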
