diff --git a/docker-compose.yml b/docker-compose.yml
index 0d9ef78a..f4aacdcf 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -121,7 +121,7 @@ services:
       CONFIG_FILE: ./tests/configuration/post_process_consumer.conf
     hostname: autoreducer
     healthcheck:
-      test: ["CMD", "pgrep", "python"]
+      test: ["CMD", "pgrep", "queueProcessor"]
     depends_on:
       activemq:
         condition: service_healthy
@@ -134,7 +134,7 @@ services:
       CONFIG_FILE: ./tests/configuration/post_process_consumer.himem.conf
     hostname: autoreducer.himem
     healthcheck:
-      test: ["CMD", "pgrep", "python"]
+      test: ["CMD", "pgrep", "queueProcessor"]
     depends_on:
       activemq:
         condition: service_healthy
diff --git a/src/webmon_app/reporting/dasmon/view_util.py b/src/webmon_app/reporting/dasmon/view_util.py
index 4f032c83..789d361d 100644
--- a/src/webmon_app/reporting/dasmon/view_util.py
+++ b/src/webmon_app/reporting/dasmon/view_util.py
@@ -5,7 +5,7 @@

 @copyright: 2014 Oak Ridge National Laboratory
 """
-from reporting.report.models import Instrument, DataRun, WorkflowSummary
+from reporting.report.models import Instrument, DataRun, WorkflowSummary, Information
 from reporting.dasmon.models import (
     Parameter,
     StatusVariable,
@@ -545,26 +545,37 @@ def postprocessing_diagnostics(timeout=None):
                 )
                 nodes.append(
                     {
-                        "node": "%s PID %s"
-                        % (
-                            item.name[
-                                len(settings.SYSTEM_STATUS_PREFIX) : len(item.name) - 4  # noqa E203
-                            ],
-                            last_value.value,
-                        ),
+                        "node": item.name[
+                            len(settings.SYSTEM_STATUS_PREFIX) : len(item.name) - 4  # noqa E203
+                        ],
                         "time": timezone.localtime(last_value.timestamp),
+                        "msg": f"PID: {last_value.value}",
                     }
                 )
             else:
+                node = item.name[len(settings.SYSTEM_STATUS_PREFIX) :]  # noqa E203
                 last_value = StatusCache.objects.filter(instrument_id=common_services, key_id=item).latest(
                     "timestamp"
                 )
                 nodes.append(
                     {
-                        "node": item.name[len(settings.SYSTEM_STATUS_PREFIX) :],  # noqa E203
+                        "node": node,
                         "time": timezone.localtime(last_value.timestamp),
                     }
                 )
+
+                # report the last run status handled by this node, matched by Information.description
+                try:
+                    last_status = Information.objects.filter(description=node).latest("id")
+                    nodes.append(
+                        {
+                            "node": node,
+                            "time": timezone.localtime(last_status.run_status_id.created_on),
+                            "msg": f"Last msg: {last_status.run_status_id}",
+                        }
+                    )
+                except Information.DoesNotExist:
+                    pass
         except:  # noqa: E722
             nodes.append(
                 {
diff --git a/src/webmon_app/reporting/templates/dasmon/diagnostics.html b/src/webmon_app/reporting/templates/dasmon/diagnostics.html
index e064199e..4fa1afea 100644
--- a/src/webmon_app/reporting/templates/dasmon/diagnostics.html
+++ b/src/webmon_app/reporting/templates/dasmon/diagnostics.html
@@ -127,7 +127,7 @@
       {% for item in post_diagnostics.ar_nodes %}
-      <tr><td>{{ item.node }}: {{ item.time }}</td></tr>
+      <tr><td>{{ item.node }}: {{ item.time }}</td><td>{{ item.msg }}</td></tr>
       {% endfor %}
diff --git a/src/webmon_app/reporting/tests/test_dasmon/test_view_util.py b/src/webmon_app/reporting/tests/test_dasmon/test_view_util.py
index 3d503812..409341b7 100644
--- a/src/webmon_app/reporting/tests/test_dasmon/test_view_util.py
+++ b/src/webmon_app/reporting/tests/test_dasmon/test_view_util.py
@@ -8,7 +8,7 @@
 from django.contrib.auth.models import Group
 from django.utils import timezone

-from reporting.report.models import Instrument
+from reporting.report.models import Instrument, Information, RunStatus, StatusQueue
 from reporting.dasmon.models import ActiveInstrument, Parameter, StatusCache, StatusVariable, Signal
 from workflow.database.report.models import DataRun
 from workflow.database.report.models import IPTS
@@ -446,14 +446,58 @@ def test_workflow_diagnostics(self):

     def test_postprocessing_diagnostics(self):
         from reporting.dasmon.view_util import postprocessing_diagnostics

+        # add postprocessing services
+        common = Instrument.objects.get(name="common")
+        name_postprocessor = settings.SYSTEM_STATUS_PREFIX + "autoreducer4.com"
+        para_postprocessor = Parameter.objects.create(name=name_postprocessor)
+        para_postprocessor.save()
+        StatusCache.objects.create(
+            instrument_id=common,
+            key_id=para_postprocessor,
+            value=0,
+            timestamp=timezone.now(),
+        )
+        para_postprocessor_pid = Parameter.objects.create(name=name_postprocessor + "_pid")
+        para_postprocessor_pid.save()
+        StatusCache.objects.create(
+            instrument_id=common,
+            key_id=para_postprocessor_pid,
+            value=7,
+            timestamp=timezone.now(),
+        )
+
+        # create StatusQueue, DataRun, RunStatus and Information needed for test
+        inst = Instrument.objects.get(name="testinst")
+        queue = StatusQueue(name="REDUCTION.COMPLETE")
+        queue.save()
+        ipts = IPTS(expt_name="IPTS-42")
+        ipts.save()
+        dataRun = DataRun(run_number=42, ipts_id=ipts, instrument_id=inst, file="/filename")
+        dataRun.save()
+        runStatus = RunStatus(run_id=dataRun, queue_id=queue)
+        runStatus.save()
+        info = Information(run_status_id=runStatus, description="autoreducer4.com")
+        info.save()
+
         red_diag = postprocessing_diagnostics()
         # NOTE: we don't have any postprocessing data during testing, so only
         # test the entry that does exist
         assert red_diag["catalog_status"] == 0
         assert red_diag["reduction_status"] == 0
-        assert len(red_diag["ar_nodes"]) == 0
         assert len(red_diag["conditions"]) == 0
+
+        # for nodes we have data to check
+        assert len(red_diag["ar_nodes"]) == 3
+        for i in range(3):
+            assert "time" in red_diag["ar_nodes"][i]
+            assert red_diag["ar_nodes"][i]["node"] == "autoreducer4.com"
+
+        msgs = [node["msg"] for node in red_diag["ar_nodes"] if "msg" in node]
+        print(msgs)
+        assert len(msgs) == 2
+        assert "PID: 7" in msgs
+        assert "Last msg: testinst_42: REDUCTION.COMPLETE" in msgs

     def test_pvstreamer_diagnostics(self):
         from reporting.dasmon.view_util import pvstreamer_diagnostics
diff --git a/tests/test_DASMONPageView.py b/tests/test_DASMONPageView.py
index 8c86a895..6f93b6a0 100644
--- a/tests/test_DASMONPageView.py
+++ b/tests/test_DASMONPageView.py
@@ -35,7 +35,7 @@ def testVerifyDASMONPageView(self, dasmon_diagnostics):
         tree = etree.parse(StringIO(dasmon_diagnostics.text), parser)
         table_content = tree.xpath("//tr/td//text()")
         # verify number of entries in the tables
-        expected_number_of_entries = 43
+        expected_number_of_entries = 48
         assert len(table_content) == expected_number_of_entries
         # -- DASMON diagnostics
         status = table_content[1]