Support Multiple DFOs through an update to the DFO Protocol #394

Open
wants to merge 15 commits into base: develop
CMakeLists.txt (6 changes: 4 additions & 2 deletions)
@@ -32,8 +32,10 @@ daq_add_plugin( HDF5DataStore duneDataStore LINK_LIBRARIES dfmodules hdf5lib
daq_add_plugin( FragmentAggregatorModule duneDAQModule LINK_LIBRARIES dfmodules iomanager::iomanager )
daq_add_plugin( DataWriterModule duneDAQModule LINK_LIBRARIES dfmodules hdf5libs::hdf5libs iomanager::iomanager )
daq_add_plugin( DFOModule duneDAQModule LINK_LIBRARIES dfmodules iomanager::iomanager )
+daq_add_plugin( DFOBrokerModule duneDAQModule LINK_LIBRARIES dfmodules iomanager::iomanager )
daq_add_plugin( TRBModule duneDAQModule LINK_LIBRARIES dfmodules iomanager::iomanager )
daq_add_plugin( FakeDataProdModule duneDAQModule LINK_LIBRARIES dfmodules iomanager::iomanager)
+daq_add_plugin( FakeDFOClientModule duneDAQModule LINK_LIBRARIES dfmodules iomanager::iomanager )
daq_add_plugin( TPStreamWriterModule duneDAQModule LINK_LIBRARIES dfmodules hdf5libs::hdf5libs trigger::trigger Boost::iostreams )

##############################################################################
@@ -45,8 +47,8 @@ add_dependencies( HDF5Write_test dfmodules_HDF5DataStore_duneDataStore )
daq_add_unit_test( DFOModule_test LINK_LIBRARIES dfmodules )
add_dependencies( DFOModule_test dfmodules_DFOModule_duneDAQModule)

-daq_add_unit_test( TriggerRecordBuilderData_test LINK_LIBRARIES dfmodules)
-daq_add_unit_test( DataStoreFactory_test LINK_LIBRARIES dfmodules)
+daq_add_unit_test( TriggerRecordBuilderData_test LINK_LIBRARIES dfmodules )
+daq_add_unit_test( DataStoreFactory_test LINK_LIBRARIES dfmodules )

##############################################################################
integtest/dfo_test.py (new file, 87 additions & 0 deletions)
@@ -0,0 +1,87 @@
import pytest
import os
import copy

import integrationtest.data_file_checks as data_file_checks
import integrationtest.log_file_checks as log_file_checks
import integrationtest.data_classes as data_classes

pytest_plugins = "integrationtest.integrationtest_drunc"

# Values that help determine the running conditions
run_duration = 20 # seconds

# Default values for validation parameters
check_for_logfile_errors = True

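# Log messages matching these patterns (keyed by a pattern on the log-file /
# application name) are not counted as errors by the log-file checks below.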
ignored_logfile_problems = {
"-controller": [
"Worker with pid \\d+ was terminated due to signal",
"Connection '.*' not found on the application registry",
],
"connectivity-service": [
"errorlog: -",
],
"local-connection-server": [
"was sent SIGHUP!",
],
"log_.*": ["connect: Connection refused"],
}

# The arguments to pass to the config generator, excluding the json
# output directory (the test framework handles that)

config_obj = data_classes.drunc_config()
config_obj.attempt_cleanup = True
config_obj.op_env = "dfotest"
config_obj.config_db = os.path.dirname(__file__) + "/../test/config/dfo-test.data.xml"
config_obj.session = "dfo-test"

confgen_arguments = {
"DFO Test": config_obj,
}


# The commands to run in nanorc, as a list
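# The sequence covers three runs: run 101 exercises only the initially-active DFO,
# run 102 switches the active DFO to dfo-02 mid-run ("enable-dfo --dfo-name dfo-02"
# while triggers are enabled), and run 103 enables dfo-02 before triggers are started.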
nanorc_command_list = (
"boot wait 5 conf".split()
+ "start --run-number 101 wait 1 enable-triggers wait ".split()
+ [str(run_duration)]
+ "disable-triggers wait 2 drain-dataflow wait 2 stop-trigger-sources stop wait 2".split()
+ "start --run-number 102 wait 1 enable-triggers wait ".split()
+ [str(run_duration)]
+ "enable-dfo --dfo-name dfo-02 wait ".split()
+ [str(run_duration)]
+ "disable-triggers wait 2 drain-dataflow wait 2 stop-trigger-sources stop wait 2".split()
+ "start --run-number 103 wait 1 enable-dfo --dfo-name dfo-02 enable-triggers wait ".split()
+ [str(run_duration)]
+ "disable-triggers wait 2 drain-dataflow wait 2 stop-trigger-sources stop wait 2".split()
+ "scrap wait 5 terminate".split()
)

# The tests themselves


def test_nanorc_success(run_nanorc):

# Check that nanorc completed correctly
assert run_nanorc.completed_process.returncode == 0


def test_log_files(run_nanorc):
current_test = os.environ.get("PYTEST_CURRENT_TEST")

# Check that at least some of the expected log files are present
assert any(
f"{run_nanorc.session}_df-01" in str(logname)
for logname in run_nanorc.log_files
)
assert any(
f"{run_nanorc.session}_dfo" in str(logname) for logname in run_nanorc.log_files
)

if check_for_logfile_errors:
# Check that there are no warnings or errors in the log files
assert log_file_checks.logs_are_error_free(
run_nanorc.log_files, True, True, ignored_logfile_problems
)
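
The file imports data_file_checks, but the tests above only validate the nanorc return code and the log files. A data-file validation step could look roughly like the sketch below; it is not part of this PR and assumes the usual integrationtest helpers (DataFile, sanity_check, check_file_attributes, check_event_count) plus placeholder event-count values, so the names and signatures would need to be checked against the framework.

def test_data_files(run_nanorc):
    # Hypothetical sketch: run basic sanity checks on every HDF5 file produced
    # by the three runs. The expected count and tolerance are placeholders that
    # would have to match the trigger configuration in dfo-test.data.xml.
    expected_event_count = 10
    expected_event_count_tolerance = 10

    for data_file_path in run_nanorc.data_files:
        data_file = data_file_checks.DataFile(data_file_path)
        assert data_file_checks.sanity_check(data_file)
        assert data_file_checks.check_file_attributes(data_file)
        assert data_file_checks.check_event_count(
            data_file, expected_event_count, expected_event_count_tolerance
        )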