diff --git a/notebooks_tsqr/ExposureDetail.ipynb b/notebooks_tsqr/ExposureDetail.ipynb
index 035f4f3..37f011b 100644
--- a/notebooks_tsqr/ExposureDetail.ipynb
+++ b/notebooks_tsqr/ExposureDetail.ipynb
@@ -14,7 +14,7 @@
"# day_obs values: TODAY, YESTERDAY, YYYY-MM-DD\n",
"# Report on observing nights that start upto but not included this day.\n",
"#!day_obs = '2024-09-25' # Value to use for local testing (usdf)\n",
- "day_obs = \"2024-12-05\" # TODO Change to 'YESTERDAY' to test with default before push\n",
+ "day_obs = \"2024-11-20\" # TODO Change to 'YESTERDAY' to test with default before push\n",
"instrument = \"LSSTComCam\" # LSSTComCam, LATISS, LSSTCam\n",
"observation_reason = \"ALL\"\n",
"observation_type = \"science\" # TODO: \"science\", \"acq\", default=\"ALL\"\n",
@@ -25,7 +25,11 @@
"cell_type": "code",
"execution_count": null,
"id": "1",
- "metadata": {},
+ "metadata": {
+ "jupyter": {
+ "source_hidden": true
+ }
+ },
"outputs": [],
"source": [
"# IMPORT everything\n",
@@ -56,7 +60,11 @@
"cell_type": "code",
"execution_count": null,
"id": "2",
- "metadata": {},
+ "metadata": {
+ "jupyter": {
+ "source_hidden": true
+ }
+ },
"outputs": [],
"source": [
"# Set default env to \"usdf\" and try before PUSH to repo.\n",
@@ -84,7 +92,11 @@
"cell_type": "code",
"execution_count": null,
"id": "3",
- "metadata": {},
+ "metadata": {
+ "jupyter": {
+ "source_hidden": true
+ }
+ },
"outputs": [],
"source": [
"# Normalize Parameters (both explicit Times Squares params, in implicit ones)\n",
@@ -114,7 +126,7 @@
" min_dayobs=min_day_obs,\n",
" max_dayobs=max_day_obs,\n",
" verbose=False, # TODO change to False before push\n",
- " warning=False, # TODO change to True before push\n",
+ " warning=True, # TODO change to True before push\n",
" limit=5000,\n",
")"
]
@@ -125,6 +137,16 @@
"id": "5",
"metadata": {},
"outputs": [],
+ "source": [
+ "# https://usdf-rsp-dev.slac.stanford.edu/consdb/query"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "6",
+ "metadata": {},
+ "outputs": [],
"source": [
"if observation_reason:\n",
" md(f\"# Observation Reason: {observation_reason}\")\n",
@@ -139,43 +161,52 @@
" observation_type=observation_type,\n",
")\n",
"\n",
- "md(f\"The number of exposures in this filtered result is {len(df.index)}\")\n",
+ "md(f\"The number of exposures in this filtered result is {len(df.index)}.\")\n",
+ "md(\n",
+ " f\"Exposures cover the day_obs range starting {allsrc.min_dayobs} and ending before {allsrc.max_dayobs}.\"\n",
+ ")\n",
"display(HTML(df.to_html(index=False)))"
]
},
{
"cell_type": "markdown",
- "id": "6",
+ "id": "7",
"metadata": {},
"source": [
- "| Symbol | Meaning |\n",
- "|:---|:---|\n",
- "| G | Good |\n",
- "| ? | Questionable |\n",
- "| R | Junk |"
+ "-----------------"
]
},
{
"cell_type": "markdown",
- "id": "7",
- "metadata": {},
+ "id": "8",
+ "metadata": {
+ "jp-MarkdownHeadingCollapsed": true
+ },
"source": [
- "-----------------"
+ "# Developer Only Section"
]
},
{
- "cell_type": "markdown",
- "id": "8",
+ "cell_type": "raw",
+ "id": "9",
"metadata": {},
"source": [
- "# Developer Only Section"
+ "| Symbol | Meaning |\n",
+ "|:---|:---|\n",
+ "| G | Good |\n",
+ "| ? | Questionable |\n",
+ "| R | Junk |"
]
},
{
"cell_type": "code",
"execution_count": null,
- "id": "9",
- "metadata": {},
+ "id": "10",
+ "metadata": {
+ "jupyter": {
+ "source_hidden": true
+ }
+ },
"outputs": [],
"source": [
"display(params)\n",
@@ -186,8 +217,12 @@
{
"cell_type": "code",
"execution_count": null,
- "id": "10",
- "metadata": {},
+ "id": "11",
+ "metadata": {
+ "jupyter": {
+ "source_hidden": true
+ }
+ },
"outputs": [],
"source": [
"print({k: len(v) for k, v in allsrc.exp_src.exposures.items()})"
@@ -196,8 +231,12 @@
{
"cell_type": "code",
"execution_count": null,
- "id": "11",
- "metadata": {},
+ "id": "12",
+ "metadata": {
+ "jupyter": {
+ "source_hidden": true
+ }
+ },
"outputs": [],
"source": [
"allsrc"
@@ -206,8 +245,12 @@
{
"cell_type": "code",
"execution_count": null,
- "id": "12",
- "metadata": {},
+ "id": "13",
+ "metadata": {
+ "jupyter": {
+ "source_hidden": true
+ }
+ },
"outputs": [],
"source": [
"print(f\"Elapsed time (excluding code import): {timer.toc:.1f} seconds\")\n",
diff --git a/notebooks_tsqr/NightLog.ipynb b/notebooks_tsqr/NightLog.ipynb
index 1b6d547..d6041ee 100644
--- a/notebooks_tsqr/NightLog.ipynb
+++ b/notebooks_tsqr/NightLog.ipynb
@@ -1,25 +1,20 @@
{
"cells": [
- {
- "cell_type": "raw",
- "id": "0",
- "metadata": {},
- "source": []
- },
{
"cell_type": "markdown",
- "id": "1",
+ "id": "0",
"metadata": {},
"source": [
"# About this Page\n",
"***What are we missing?*** See the slack *#ts-logging* channel for discussion about this page. Use it to report problems, ask questions, and make requests for changes. \n",
"\n",
- "## What is new in this application?(newest change at top of list)\n",
+ "## What is new in this application? (newest changes at top of list)\n",
"- Stakeholders decided that supported instruments will be: LSSTComCam, LSSTCam, and LATISS. Of those, LSSTCam is not supported in ConsDB and limitations in LATISS will force some of the requested fields for ExposureDetail to be given as NA.\n",
+ "- Various minor, mostly cosmetic changes.\n",
+ "- Removed Consolidated Database section. Most of it is in ExposureDetail linked to from Data Log.\n",
"- Added optional WARNING behavior to alert when no records are found, instruments are excluded from results, etc.\n",
- "- Name change of this page to *Nightly Digest* (but file name remains the same)\n",
- "- Added Merged time-log with compaction. Merges all sources by time and compacts them into a single time period (currently 4 hour). Rendering of DataFrame is now done via a jinja2 Template which gives much greater (largely unrealized) control over display.\n",
- "- Added initial Consolidated Database section\n",
+ "- Name change of this page to *Nightly Digest* (but name in Times Square url remains the same)\n",
+ "- Added Merged time-log with compaction. Merges all sources by time and compacts them into summary time periods (currently 4 hour). Rendering of DataFrame is done via HTMLTemplate which gives much greater (unrealized) control over display. \n",
"- Add section for \"Links to related resources\". Let us know is other links should be added.\n",
"- Exposure quality flag added to Exposure Detail (accessed by following row links in Data Log)\n",
"- \n",
@@ -31,10 +26,8 @@
"\n",
"## Changes being considered in the near-term\n",
"You can influence prorities by telling #ts-logging that a feature is *really important* or *not important*.\n",
- "- Add parameter for Period that used in Merged time-log\n",
- "- Show more exposure fields on the ExposureDetail page (accessed via links in Data Log) section. Include fields in *Consolidated Database* section plus some Visit fields.\n",
+ "- Distinguish between errors in report and errors/warnings from Night Digest code. (How? Color coding? Something else?)\n",
"- Ensure all data-logs include data from Dome noon to noon for day_obs\n",
- "- (Stretch) Invent a general way to compact DataFrame by removing redundancy. Similar [DB normalization](https://en.wikipedia.org/wiki/Database_normalization)\n",
"- (Big Stretch) Create a single time-log that merges records from all selected *Source Adapters* into a *compacted* log. Allow delta like \"20min\", \"4hour\".\n",
"- (Realy Big Stretch) Using AI LLM to summarize a multi-source merged night of logging into: a. full summary, b. dark-time/light-time summaries, c. 4-hr summaries"
]
@@ -42,7 +35,7 @@
{
"cell_type": "code",
"execution_count": null,
- "id": "2",
+ "id": "1",
"metadata": {},
"outputs": [],
"source": [
@@ -53,18 +46,19 @@
"# day_obs values: TODAY, v, YYYY-MM-DD\n",
"# Report on observing nights that start upto but not included this day.\n",
"# day_obs = '2024-09-25' # 2024-12-05 Value to use for local testing (Summit)\n",
- "day_obs = \"2024-12-05\" # TODO Change to 'YESTERDAY' and 'TODAY' to test with default before push\n",
+ "day_obs = \"2024-11-20\" # TODO Change to 'YESTERDAY' and 'TODAY' to test with default before push\n",
"\n",
"# Total number of days of data to display (ending on day_obs)\n",
"number_of_days = \"1\" # TODO Change to '1' to test with default before push\n",
- "\n",
- "verbose = \"false\" # TODO change to false before push, else true"
+ "period = \"2h\" # TODO change to 4h before push\n",
+ "verbose = \"false\" # TODO change to false before push, else true\n",
+ "warning = \"false\" # TODO change to false before push, else true"
]
},
{
"cell_type": "code",
"execution_count": null,
- "id": "3",
+ "id": "2",
"metadata": {},
"outputs": [],
"source": [
@@ -100,7 +94,7 @@
{
"cell_type": "code",
"execution_count": null,
- "id": "4",
+ "id": "3",
"metadata": {},
"outputs": [],
"source": [
@@ -114,19 +108,23 @@
{
"cell_type": "code",
"execution_count": null,
- "id": "5",
+ "id": "4",
"metadata": {},
"outputs": [],
"source": [
"# Validate parameters, return usable ones\n",
- "usable, error = ut.fallback_parameters(day_obs, number_of_days, verbose)\n",
+ "usable, error = ut.fallback_parameters(\n",
+ " day_obs, number_of_days, period, verbose, warning\n",
+ ")\n",
"if error:\n",
" print(error)\n",
"\n",
"date = ut.get_datetime_from_dayobs_str(usable[\"day_obs\"])\n",
"# date: is EXCLUSIVE (upto, but not including)\n",
"days = usable[\"number_of_days\"]\n",
+ "period = usable[\"period\"]\n",
"verbose = usable[\"verbose\"]\n",
+ "warning = usable[\"warning\"]\n",
"\n",
"# Thus: [min_day_obs,max_day_obs)\n",
"# Format: string, YYYY-MM-DD\n",
@@ -139,7 +137,7 @@
{
"cell_type": "code",
"execution_count": null,
- "id": "6",
+ "id": "5",
"metadata": {},
"outputs": [],
"source": [
@@ -150,7 +148,7 @@
" min_dayobs=min_day_obs,\n",
" max_dayobs=max_day_obs,\n",
" verbose=verbose,\n",
- " warning=True, # TODO change to True before push\n",
+ " warning=warning,\n",
" limit=5000,\n",
" exclude_instruments=[], # TODO change to empty list before push\n",
")\n",
@@ -159,7 +157,7 @@
},
{
"cell_type": "markdown",
- "id": "7",
+ "id": "6",
"metadata": {},
"source": [
"----------"
@@ -168,7 +166,7 @@
{
"cell_type": "code",
"execution_count": null,
- "id": "8",
+ "id": "7",
"metadata": {},
"outputs": [],
"source": [
@@ -185,14 +183,14 @@
},
{
"cell_type": "markdown",
- "id": "9",
+ "id": "8",
"metadata": {},
"source": [
"# Table of Contents\n",
+ "* [Almanac (BETA)](#Almanac-BETA)\n",
"* [Night Report (BETA)](#Night-Report-BETA)\n",
" - AuxTel\n",
" - Simonyi\n",
- "* [Almanac (BETA)](#Almanac-BETA)\n",
"* [Summary plots of whole night (DRAFT)](#Summary-plots-of-whole-night-DRAFT)\n",
"* [Links to related resources (BETA)](#Links-to-related-resources-BETA)\n",
"* [Time Accounting (DRAFT)](#Time-Accounting-DRAFT)\n",
@@ -204,14 +202,13 @@
" - Simonyi\n",
"* [Data Log (BETA)](#Data-Log-BETA)\n",
"* [Narrative Log (BETA)](#Narrative-Log-BETA)\n",
- "* [Consolidated Database (DRAFT)](#Consolidated-Database-DRAFT)\n",
"* [Merged time-log with compaction (DRAFT)](#Merged-time-log-with-compaction-DRAFT)\n",
"* [Developer Only Section (REMOVE)](#Developer-Only-Section-REMOVE)"
]
},
{
"cell_type": "markdown",
- "id": "10",
+ "id": "9",
"metadata": {},
"source": [
"## Almanac BETA \n",
@@ -222,7 +219,7 @@
{
"cell_type": "code",
"execution_count": null,
- "id": "11",
+ "id": "10",
"metadata": {},
"outputs": [],
"source": [
@@ -232,7 +229,7 @@
},
{
"cell_type": "markdown",
- "id": "12",
+ "id": "11",
"metadata": {},
"source": [
"## Night Report BETA \n",
@@ -243,7 +240,7 @@
},
{
"cell_type": "markdown",
- "id": "13",
+ "id": "12",
"metadata": {},
"source": [
"Also see: [2024-12-09 Commissioning Plan](https://rubinobs.atlassian.net/projects/BLOCK?selectedItem=com.atlassian.plugins.atlassian-connect-plugin:com.kanoah.test-manager__main-project-page#!/testCycle/BLOCK-R164)\n",
@@ -253,7 +250,7 @@
{
"cell_type": "code",
"execution_count": null,
- "id": "14",
+ "id": "13",
"metadata": {},
"outputs": [],
"source": [
@@ -264,7 +261,7 @@
},
{
"cell_type": "markdown",
- "id": "15",
+ "id": "14",
"metadata": {},
"source": [
"## Summary plots of whole night DRAFT"
@@ -272,7 +269,7 @@
},
{
"cell_type": "markdown",
- "id": "16",
+ "id": "15",
"metadata": {},
"source": [
"(content not yet defined in storyboard)\n",
@@ -283,7 +280,7 @@
{
"cell_type": "code",
"execution_count": null,
- "id": "17",
+ "id": "16",
"metadata": {},
"outputs": [],
"source": [
@@ -294,7 +291,7 @@
},
{
"cell_type": "markdown",
- "id": "18",
+ "id": "17",
"metadata": {},
"source": [
"## Links to related resources BETA\n",
@@ -304,7 +301,7 @@
{
"cell_type": "code",
"execution_count": null,
- "id": "19",
+ "id": "18",
"metadata": {},
"outputs": [],
"source": [
@@ -325,19 +322,18 @@
},
{
"cell_type": "markdown",
- "id": "20",
+ "id": "19",
"metadata": {},
"source": [
"## Time Accounting BETA\n",
"TODO:\n",
- "- Time-loss for Fault and Weather (very rarely in narrativelog/messages)\n",
- "- SlewTime from TMAEvent (or similar). **SlewTime might find its way into the Consolidate Database**, in which case we could get it from there.\n"
+ "- Time-loss for Fault and Weather (very rarely in narrativelog/messages)"
]
},
{
"cell_type": "code",
"execution_count": null,
- "id": "21",
+ "id": "20",
"metadata": {},
"outputs": [],
"source": [
@@ -349,15 +345,16 @@
},
{
"cell_type": "markdown",
- "id": "22",
+ "id": "21",
"metadata": {},
"source": [
- "- (1) There is no practical way to get detector read-out time. A value of 2.41 seconds per exposure is used."
+ "- (1) There is no practical way to get detector read-out time. A value of 2.41 seconds per exposure is used.\n",
+ "- (2) There is currently no simple way to get slew times. We expect SlewTime to find its way into the Consolidated Database eventually. \n",
]
},
{
"cell_type": "markdown",
- "id": "23",
+ "id": "22",
"metadata": {},
"source": [
"## Jira Tickets BETA \n",
@@ -369,7 +366,7 @@
{
"cell_type": "code",
"execution_count": null,
- "id": "24",
+ "id": "23",
"metadata": {},
"outputs": [],
"source": [
@@ -383,7 +380,7 @@
{
"cell_type": "code",
"execution_count": null,
- "id": "25",
+ "id": "24",
"metadata": {},
"outputs": [],
"source": [
@@ -404,7 +401,7 @@
},
{
"cell_type": "markdown",
- "id": "26",
+ "id": "25",
"metadata": {},
"source": [
"## BLOCKS Observed DRAFT\n",
@@ -413,7 +410,7 @@
},
{
"cell_type": "markdown",
- "id": "27",
+ "id": "26",
"metadata": {},
"source": [
"## Data Log BETA\n",
@@ -423,7 +420,7 @@
{
"cell_type": "code",
"execution_count": null,
- "id": "28",
+ "id": "27",
"metadata": {},
"outputs": [],
"source": [
@@ -434,13 +431,13 @@
" for field_name, df in df_dict.items():\n",
" if df.empty:\n",
" continue\n",
- " md(\"##### \" + field_name.title().replace(\"_\", \" \"))\n",
+ " #! md(\"##### \" + field_name.title().replace(\"_\", \" \"))\n",
" display(HTML(df.style.hide().to_html(escape=False)))"
]
},
{
"cell_type": "markdown",
- "id": "29",
+ "id": "28",
"metadata": {},
"source": [
"## Narrative Log BETA \n",
@@ -453,7 +450,7 @@
{
"cell_type": "code",
"execution_count": null,
- "id": "30",
+ "id": "29",
"metadata": {},
"outputs": [],
"source": [
@@ -463,7 +460,7 @@
{
"cell_type": "code",
"execution_count": null,
- "id": "31",
+ "id": "30",
"metadata": {
"editable": true,
"slideshow": {
@@ -480,29 +477,7 @@
},
{
"cell_type": "markdown",
- "id": "32",
- "metadata": {},
- "source": [
- "## Consolidated Database DRAFT\n",
- "Retrieve exposure records from ConsDB. The Developer Only Section contains a list of ALL fields currently available in ConsDB per [ConsDB API](https://usdf-rsp.slac.stanford.edu/consdb/docs).\n",
- "\n",
- "- [ ] TODO: Integrate this and other queries into columns of other sections.\n",
- "- [ ] TODO: Remove this section from NightLog and incorporate it into ExposureDetail (linked to from DataLog on this page)"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "33",
- "metadata": {},
- "outputs": [],
- "source": [
- "allsrc.cdb_src.get_exposures(instrument=\"lsstcomcam\")"
- ]
- },
- {
- "cell_type": "markdown",
- "id": "34",
+ "id": "31",
"metadata": {},
"source": [
"## Merged time-log with compaction DRAFT\n",
@@ -524,7 +499,7 @@
{
"cell_type": "code",
"execution_count": null,
- "id": "35",
+ "id": "32",
"metadata": {},
"outputs": [],
"source": [
@@ -534,18 +509,18 @@
{
"cell_type": "code",
"execution_count": null,
- "id": "36",
+ "id": "33",
"metadata": {},
"outputs": [],
"source": [
"# from importlib import reload\n",
"# reload(tl)\n",
- "display(HTML(tl.sutl(allsrc, delta=\"3h\", verbose=False)))"
+ "display(HTML(tl.sutl(allsrc, delta=period, verbose=False)))"
]
},
{
"cell_type": "markdown",
- "id": "37",
+ "id": "34",
"metadata": {},
"source": [
"-----------\n",
@@ -554,18 +529,19 @@
},
{
"cell_type": "markdown",
- "id": "38",
- "metadata": {},
+ "id": "35",
+ "metadata": {
+ "jp-MarkdownHeadingCollapsed": true
+ },
"source": [
"# Developer Only Section REMOVE\n",
"Contains stuff only expected to be useful to developers.\n",
- "\n",
- "This may also contain sections that have moved out of the user section because they are no longer defined in the Storyboard."
+ "This may also contain sections that have moved out of the user section."
]
},
{
"cell_type": "markdown",
- "id": "39",
+ "id": "36",
"metadata": {},
"source": [
"## Overview \n"
@@ -574,7 +550,7 @@
{
"cell_type": "code",
"execution_count": null,
- "id": "40",
+ "id": "37",
"metadata": {},
"outputs": [],
"source": [
@@ -607,7 +583,7 @@
},
{
"cell_type": "markdown",
- "id": "41",
+ "id": "38",
"metadata": {},
"source": [
"## Data Status\n",
@@ -617,7 +593,7 @@
{
"cell_type": "code",
"execution_count": null,
- "id": "42",
+ "id": "39",
"metadata": {},
"outputs": [],
"source": [
@@ -628,7 +604,7 @@
},
{
"cell_type": "markdown",
- "id": "43",
+ "id": "40",
"metadata": {},
"source": [
"## This report uses the following data sources\n",
@@ -643,7 +619,7 @@
},
{
"cell_type": "markdown",
- "id": "44",
+ "id": "41",
"metadata": {},
"source": [
"## Where was this run?\n",
@@ -658,7 +634,7 @@
},
{
"cell_type": "markdown",
- "id": "45",
+ "id": "42",
"metadata": {},
"source": [
"## Available Consolidated Database fields\n",
@@ -672,7 +648,7 @@
{
"cell_type": "code",
"execution_count": null,
- "id": "46",
+ "id": "43",
"metadata": {},
"outputs": [],
"source": [
@@ -690,7 +666,7 @@
},
{
"cell_type": "markdown",
- "id": "47",
+ "id": "44",
"metadata": {},
"source": [
"## Section overviews moved here"
@@ -699,7 +675,7 @@
{
"cell_type": "code",
"execution_count": null,
- "id": "48",
+ "id": "45",
"metadata": {},
"outputs": [],
"source": [
@@ -714,7 +690,7 @@
{
"cell_type": "code",
"execution_count": null,
- "id": "49",
+ "id": "46",
"metadata": {},
"outputs": [],
"source": [
@@ -728,7 +704,7 @@
{
"cell_type": "code",
"execution_count": null,
- "id": "50",
+ "id": "47",
"metadata": {},
"outputs": [],
"source": [
@@ -738,7 +714,7 @@
},
{
"cell_type": "markdown",
- "id": "51",
+ "id": "48",
"metadata": {},
"source": [
"## Finale"
@@ -747,7 +723,7 @@
{
"cell_type": "code",
"execution_count": null,
- "id": "52",
+ "id": "49",
"metadata": {},
"outputs": [],
"source": [
diff --git a/notebooks_tsqr/NightLog.yaml b/notebooks_tsqr/NightLog.yaml
index 50358fb..715f92e 100644
--- a/notebooks_tsqr/NightLog.yaml
+++ b/notebooks_tsqr/NightLog.yaml
@@ -45,3 +45,9 @@ parameters:
description: >
Enable verbose output. (Generally for development use.)
default: false
+ warning:
+ type: boolean
+ description: >
+ Enable warnings when: zero records retrieved from a source, instruments are
+ excluded, etc.
+ default: false
diff --git a/python/lsst/ts/logging_and_reporting/all_sources.py b/python/lsst/ts/logging_and_reporting/all_sources.py
index 5cd99ae..999c557 100644
--- a/python/lsst/ts/logging_and_reporting/all_sources.py
+++ b/python/lsst/ts/logging_and_reporting/all_sources.py
@@ -257,6 +257,7 @@ def night_tally_observation_gaps(self, verbose=False):
# These need join between exposures and messages.
# But in messages, they aren't reliable numbers anyhow.
+ # TODO despite unreliability, use messages values.
loss_fault = pd.NA # hours
loss_weather = pd.NA # hours
@@ -271,23 +272,38 @@ def night_tally_observation_gaps(self, verbose=False):
accounted_hours = used_hours + idle_hours
instrument_tally[instrument] = {
- "Total Night": ut.hhmmss(total_observable_hours), # (a)
- "Total Exposure": ut.hhmmss(exposure_hours), # (b)
- "Readout time(1)": ut.hhmmss(readout_hours), # (e)
- "Slew time": ut.hhmmss(slew_hours), # (g)
+ "Total Observable Night": ut.hhmmss(total_observable_hours),
+ "Total Exposure": ut.hhmmss(exposure_hours),
+ "Readout time(1)": ut.hhmmss(readout_hours),
+ "Slew time(2)": ut.hhmmss(slew_hours),
"Time loss due to fault": ut.hhmmss(loss_fault),
"Time loss due to weather": ut.hhmmss(loss_weather),
- "Idle time": ut.hhmmss(idle_hours), # (i=a-b-e-g)
- "Number of exposures": num_exposures, # (c)
- "Number of slews": num_slews if pd.notna(num_slews) else "NA", # (d)
- "Mean Slew time": ut.hhmmss(mean_slew), # (g/d)
- "Accounted hours": ut.hhmmss(accounted_hours),
+ "Idle time": ut.hhmmss(idle_hours),
+ "Number of exposures": num_exposures,
+ "Number of slews": num_slews if pd.notna(num_slews) else "NA",
+ "Mean Slew time": ut.hhmmss(mean_slew),
+ "Total Accounted time": ut.hhmmss(accounted_hours),
}
# Composition to combine Exposure and Efd (blackboard)
# ts_xml/.../sal_interfaces/Scheduler/Scheduler_Events.xml
# https://ts-xml.lsst.io/sal_interfaces/Scheduler.html#slewtime
# edf.get_targets() => "slewTime" # (d,g,h)
+ tally_remarks = {
+ "Total Observable Night": "time between 18 deg twilights",
+ "Total Exposure": "Sum of exposure times",
+ "Readout time(1)": "Sum of exposure readout times",
+ "Slew time(2)": "Sum of slew times",
+ "Time loss due to fault": "Sum of time lost due to faults (apx)",
+ "Time loss due to weather": ("Sum of time lost due to weather (apx)"),
+ "Idle time": "Sum of time doing 'nothing'",
+ "Number of exposures": "",
+ "Number of slews": "",
+ "Mean Slew time": "",
+ "Total Accounted time": "Total of above sums",
+ }
+
+ instrument_tally["Remarks"] = tally_remarks
return instrument_tally
# see source_record_counts()
@@ -406,7 +422,6 @@ def get_slews(self):
"""time when MTMount azimuthInPosition and elevationInPosition events
have their inPosition items set to False and then again when they
turn True."""
-
pass
@property
@@ -445,6 +460,7 @@ def mapper(field_value):
# Values of rec["exposure_flag"]
eflag_values = ["good", "questionable", "junk", "unknown"]
table_recs = defaultdict(dict)
+ field_name_title = field_name.title().replace("_", " ")
for field in field_values:
for eflag in eflag_values:
# Initialize to zeros
@@ -452,7 +468,7 @@ def mapper(field_value):
counter.update(
[r["exposure_flag"] for r in records if r[field_name] == field]
)
- table_recs[field]["Detail"] = gen_link(field)
+ table_recs[field][field_name_title] = gen_link(field)
table_recs[field].update(dict(counter))
# User want this?: counter.update(dict(total=counter.total()))
if table_recs:
@@ -461,10 +477,15 @@ def mapper(field_value):
index=list(table_recs.keys()),
)
df.sort_index(inplace=True)
+ # Add Total row
+ tot_df = pd.DataFrame(
+ [*df.values, ["Total", *df.sum(numeric_only=True).values]],
+ columns=df.columns,
+ )
else:
- df = pd.DataFrame()
+ tot_df = pd.DataFrame()
- return df
+ return tot_df
def fields_count_exposure(self, instrument):
exposure_field_names = [
@@ -545,7 +566,7 @@ def exposure_detail(
# no CDF, no EDF
pass # empty DF returned above
- fields = [
+ fields = {
"air_temp",
"airmass",
"altitude",
@@ -577,14 +598,14 @@ def exposure_detail(
# 'day_obs_CDB',
# 'day_obs_EXP',
# 'exposure_time',
- # 'group_name',
+ "group_name",
# 'obs_start',
- # 'target_name',
+ "target_name",
# 'timespan_end',
# 'tracking_dec',
# 'tracking_ra',
# 'visit_id',
- ]
+ }
labels = {
"air_temp": "Outside Air Temp",
"airmass": "Airmass",
@@ -615,11 +636,19 @@ def exposure_detail(
"zero_point_median": "Photometric Zero Points",
}
- used_fields = set(sorted(df.columns.to_list())) & set(fields)
+ used_fields = set(sorted(df.columns.to_list())) & fields
# #! df = ut.wrap_dataframe_columns(df[fields])
# #! df.columns = df.columns.str.title()
- print(f"DBG allsrc.exposure_detail {used_fields=} {sorted(labels.keys())=}")
+ if self.verbose:
+ print(
+ "DBG allsrc.exposure_detail " f"{used_fields=} {sorted(labels.keys())=}"
+ )
+ if self.warning:
+ if used_fields < fields:
+ msg = "Some requested fields are not available. "
+ msg += f"Requested fields not used: {fields - used_fields}"
+ warnings.warn(msg, category=ex.NotAvailWarning, stacklevel=2)
df = df[list(used_fields)].rename(columns=labels, errors="ignore")
return df
diff --git a/python/lsst/ts/logging_and_reporting/consdb.py b/python/lsst/ts/logging_and_reporting/consdb.py
index 082283b..f190c1b 100644
--- a/python/lsst/ts/logging_and_reporting/consdb.py
+++ b/python/lsst/ts/logging_and_reporting/consdb.py
@@ -108,7 +108,8 @@ def get_instruments(self, include=None):
exclude = available_instruments - include
if exclude and self.warning:
elist = ", ".join(sorted(exclude))
- warnings.warn(f"Excluding these instruments from results: {elist}")
+ msg = f"Excluding these instruments from results: {elist}"
+ warnings.warn(msg, category=ex.ExcludeInstWarning, stacklevel=2)
# Some sources are case sensitive and use CamelCase.
# ConsDB will handle either, but REPORT lower case.
@@ -230,6 +231,8 @@ def get_exposures(self, instrument):
"dimm_seeing",
"exposure_id",
"exposure_name",
+ "target_name",
+ "group_id",
"exp_time", # seconds
"obs_start", # TAI
"day_obs", # int
diff --git a/python/lsst/ts/logging_and_reporting/exceptions.py b/python/lsst/ts/logging_and_reporting/exceptions.py
index e4b3e9f..d481704 100644
--- a/python/lsst/ts/logging_and_reporting/exceptions.py
+++ b/python/lsst/ts/logging_and_reporting/exceptions.py
@@ -118,3 +118,17 @@ class NoRecordsWarning(BaseLogrepError): # noqa: N818
"""Got no records. This might be ok, or maybe there is a bug."""
error_code = "ZERORECS"
+
+
+class NotAvailWarning(BaseLogrepError): # noqa: N818
+ """Some requested fields were not available.
+ This might be ok, or maybe there is a bug.
+ """
+
+ error_code = "NOTAVAIL"
+
+
+class ExcludeInstWarning(BaseLogrepError): # noqa: N818
+ """Excluding some instruments from results."""
+
+ error_code = "EXINSTRU"
diff --git a/python/lsst/ts/logging_and_reporting/time_logs.py b/python/lsst/ts/logging_and_reporting/time_logs.py
index 00a9432..fb63ce9 100644
--- a/python/lsst/ts/logging_and_reporting/time_logs.py
+++ b/python/lsst/ts/logging_and_reporting/time_logs.py
@@ -333,7 +333,7 @@ def compact(full_df, delta="4h", allow_data_loss=False, verbose=False):
# + In Period: Replace multi-values in a column with a conctenation
# of the unique values.
# TODO General aggregation using dtypes assigned in allsrc.
-def reduce_period(df, verbose=True):
+def reduce_period(df, verbose=False):
"""Group and aggregate by Period. Drops some columns. Reduces Rows."""
def multi_string(group):
diff --git a/python/lsst/ts/logging_and_reporting/utils.py b/python/lsst/ts/logging_and_reporting/utils.py
index b52d460..ca1e693 100644
--- a/python/lsst/ts/logging_and_reporting/utils.py
+++ b/python/lsst/ts/logging_and_reporting/utils.py
@@ -41,13 +41,14 @@ def date_hr_min(iso_dt_str):
return str(dt.datetime.fromisoformat(iso_dt_str))[:16]
-def fallback_parameters(day_obs, number_of_days, verbose):
+def fallback_parameters(day_obs, number_of_days, period, verbose, warning):
"""Given parameters from Times Square, return usable versions of
all parameters. If the provide parameters are not usable, return
default usable ones.
"""
- day_obs_fb = "YESTERDAY" # Fall Back value
- days_fb = 1
+ day_obs_default = "YESTERDAY" # Fall Back value
+ days_default = 1
+ period_default = "4h"
message = ""
try:
@@ -56,23 +57,37 @@ def fallback_parameters(day_obs, number_of_days, verbose):
except Exception as err:
message += f"""\nInvalid day_obs given: {day_obs!r}
Available values are: YYYY-MM-DD, YYYYMMDD, TODAY, YESTERDAY.
- Using: {day_obs_fb!r}\n{str(err)!r}
+ Using: {day_obs_default!r}\n{str(err)!r}
"""
- day_obs = day_obs_fb
+ day_obs = day_obs_default
try:
days = int(number_of_days)
except Exception as err:
- days = days_fb
+ days = days_default
message += f"""\nInvalid number_of_days given: {number_of_days!r}
Must be an integer.
Using: {days}\n{str(err)!r}
"""
+ try:
+ now = dt.datetime.now()
+ freq = pd.Period(now, period).freqstr
+ except Exception as err:
+ freq = period_default
+ message += f"\nInvalid period given: {period!r}\n"
+ message += "Must be an Alias string formed from "
+ message += (
+ "https://pandas.pydata.org/docs/user_guide/timeseries.html#period-aliases"
+ )
+ message += f"Using: {freq}\n{str(err)!r}"
+
to_use = dict(
day_obs=day_obs,
number_of_days=days,
+ period=freq,
verbose=(verbose == "true"),
+ warning=(warning == "true"),
)
return to_use, message
diff --git a/requirements.txt b/requirements.txt
index d882562..9f10de7 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,6 +1,6 @@
requests
# jupyter-lab --ip=0.0.0.0
-jupyterlab # =3.1.17
+jupyterlab # =4.3.4
pandas
matplotlib
numpy