Skip to content

Commit

Permalink
add log_tuples function (#1896)
Browse files Browse the repository at this point in the history
Co-authored-by: kk105 <[email protected]>
  • Loading branch information
kk1050 and kk105 authored Jun 6, 2022
1 parent e702624 commit 4ddd273
Show file tree
Hide file tree
Showing 3 changed files with 24 additions and 12 deletions.
5 changes: 3 additions & 2 deletions artiq/frontend/artiq_master.py
Original file line number Diff line number Diff line change
Expand Up @@ -53,12 +53,13 @@ def get_argparser():
"--experiment-subdir", default="",
help=("path to the experiment folder from the repository root "
"(default: '%(default)s')"))

log_args(parser)

parser.add_argument("--name",
help="friendly name, displayed in dashboards "
"to identify master instead of server address")
parser.add_argument("--log-submissions", default=None,
help="set the filename to create the experiment submission log")

return parser

Expand Down Expand Up @@ -111,7 +112,7 @@ def ccb_issue(service, *args, **kwargs):
repo_backend, worker_handlers, args.experiment_subdir)
atexit.register(experiment_db.close)

scheduler = Scheduler(RIDCounter(), worker_handlers, experiment_db)
scheduler = Scheduler(RIDCounter(), worker_handlers, experiment_db, args.log_submissions)
scheduler.start()
atexit_register_coroutine(scheduler.stop)

Expand Down
21 changes: 16 additions & 5 deletions artiq/master/scheduler.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
import asyncio
import logging
import csv
from enum import Enum
from time import time

Expand Down Expand Up @@ -113,14 +114,21 @@ async def build(self):


class RunPool:
def __init__(self, ridc, worker_handlers, notifier, experiment_db):
def __init__(self, ridc, worker_handlers, notifier, experiment_db, log_submissions):
self.runs = dict()
self.state_changed = Condition()

self.ridc = ridc
self.worker_handlers = worker_handlers
self.notifier = notifier
self.experiment_db = experiment_db
self.log_submissions = log_submissions

def log_submission(self, rid, expid):
start_time = time()
with open(self.log_submissions, 'a', newline='') as f:
writer = csv.writer(f)
writer.writerow([rid, start_time, expid["file"]])

def submit(self, expid, priority, due_date, flush, pipeline_name):
# mutates expid to insert head repository revision if None.
Expand All @@ -135,6 +143,8 @@ def submit(self, expid, priority, due_date, flush, pipeline_name):
wd, repo_msg = None, None
run = Run(rid, pipeline_name, wd, expid, priority, due_date, flush,
self, repo_msg=repo_msg)
if self.log_submissions is not None:
self.log_submission(rid, expid)
self.runs[rid] = run
self.state_changed.notify()
return rid
Expand Down Expand Up @@ -311,8 +321,8 @@ async def _do(self):


class Pipeline:
def __init__(self, ridc, deleter, worker_handlers, notifier, experiment_db):
self.pool = RunPool(ridc, worker_handlers, notifier, experiment_db)
def __init__(self, ridc, deleter, worker_handlers, notifier, experiment_db, log_submissions):
self.pool = RunPool(ridc, worker_handlers, notifier, experiment_db, log_submissions)
self._prepare = PrepareStage(self.pool, deleter.delete)
self._run = RunStage(self.pool, deleter.delete)
self._analyze = AnalyzeStage(self.pool, deleter.delete)
Expand Down Expand Up @@ -383,7 +393,7 @@ async def _do(self):


class Scheduler:
def __init__(self, ridc, worker_handlers, experiment_db):
def __init__(self, ridc, worker_handlers, experiment_db, log_submissions):
self.notifier = Notifier(dict())

self._pipelines = dict()
Expand All @@ -393,6 +403,7 @@ def __init__(self, ridc, worker_handlers, experiment_db):

self._ridc = ridc
self._deleter = Deleter(self._pipelines)
self._log_submissions = log_submissions

def start(self):
self._deleter.start()
Expand Down Expand Up @@ -423,7 +434,7 @@ def submit(self, pipeline_name, expid, priority=0, due_date=None, flush=False):
logger.debug("creating pipeline '%s'", pipeline_name)
pipeline = Pipeline(self._ridc, self._deleter,
self._worker_handlers, self.notifier,
self._experiment_db)
self._experiment_db, self._log_submissions)
self._pipelines[pipeline_name] = pipeline
pipeline.start()
return pipeline.pool.submit(expid, priority, due_date, flush, pipeline_name)
Expand Down
10 changes: 5 additions & 5 deletions artiq/test/test_scheduler.py
Original file line number Diff line number Diff line change
Expand Up @@ -90,7 +90,7 @@ def setUp(self):

def test_steps(self):
loop = self.loop
scheduler = Scheduler(_RIDCounter(0), dict(), None)
scheduler = Scheduler(_RIDCounter(0), dict(), None, None)
expid = _get_expid("EmptyExperiment")

expect = _get_basic_steps(1, expid)
Expand Down Expand Up @@ -129,7 +129,7 @@ def test_pending_priority(self):
prepare."""
loop = self.loop
handlers = {}
scheduler = Scheduler(_RIDCounter(0), handlers, None)
scheduler = Scheduler(_RIDCounter(0), handlers, None, None)
handlers["scheduler_check_pause"] = scheduler.check_pause

expid_empty = _get_expid("EmptyExperiment")
Expand Down Expand Up @@ -293,7 +293,7 @@ def check_termination(mod):
handlers = {
"update_dataset": check_termination
}
scheduler = Scheduler(_RIDCounter(0), handlers, None)
scheduler = Scheduler(_RIDCounter(0), handlers, None, None)

expid_bg = _get_expid("BackgroundExperiment")
expid = _get_expid("EmptyExperiment")
Expand Down Expand Up @@ -351,7 +351,7 @@ def test_close_with_active_runs(self):
"""Check scheduler exits with experiments still running"""
loop = self.loop

scheduler = Scheduler(_RIDCounter(0), {}, None)
scheduler = Scheduler(_RIDCounter(0), {}, None, None)

expid_bg = _get_expid("BackgroundExperiment")
# Suppress the SystemExit backtrace when worker process is killed.
Expand Down Expand Up @@ -392,7 +392,7 @@ def notify(mod):

def test_flush(self):
loop = self.loop
scheduler = Scheduler(_RIDCounter(0), dict(), None)
scheduler = Scheduler(_RIDCounter(0), dict(), None, None)
expid = _get_expid("EmptyExperiment")

expect = _get_basic_steps(1, expid, 1, True)
Expand Down

0 comments on commit 4ddd273

Please sign in to comment.