Merge pull request #3 from DUNE-DAQ/aabedabu_devel
Aabedabu devel
adam-abed-abud authored Nov 8, 2021
2 parents 9da05e2 + 442829b commit 7c5c3ae
Showing 12 changed files with 444 additions and 119 deletions.
38 changes: 18 additions & 20 deletions CMakeLists.txt
@@ -1,29 +1,27 @@
cmake_minimum_required(VERSION 3.12 FATAL_ERROR)

-set(PACKAGE DAQDecoder)
-set(PACKAGE_VERSION 0.1)

-project(${PACKAGE} VERSION ${PACKAGE_VERSION}
-DESCRIPTION "Decoder for the the DUNE-DAQ HDF5 files"
-LANGUAGES C CXX)

-# Print version
-message(STATUS "Building ${PACKAGE} version ${PACKAGE_VERSION}")

+project(hdf5libs VERSION 1.0.0)

find_package(daq-cmake REQUIRED)
+daq_setup_environment()



find_package(logging REQUIRED)
find_package(HighFive REQUIRED)
-find_package(dataformats REQUIRED)
+find_package(daqdataformats REQUIRED)
+find_package(detdataformats REQUIRED)
+find_package(detchannelmaps REQUIRED)
+find_package(cetlib REQUIRED) # Uses the daq-buildtools/cmake/Findcetlib.cmake

-daq_setup_environment()

-daq_add_library (DAQDecoder.cpp LINK_LIBRARIES ers::ers HighFive dataformats::dataformats logging::logging )
+##############################################################################
+# Main library
+daq_add_library (DAQDecoder.cpp LINK_LIBRARIES ers::ers HighFive daqdataformats::daqdataformats logging::logging detdataformats::detdataformats detchannelmaps::detchannelmaps ${CETLIB} ${CETLIB_EXCEPT})

+##############################################################################
+# Python bindings
+daq_add_python_bindings(*.cpp LINK_LIBRARIES ${PROJECT_NAME})

-# Demo
-daq_add_application(hdf5_demo_reader demo.cpp LINK_LIBRARIES DAQDecoder)
+##############################################################################
+# Applications
+daq_add_application(hdf5_demo_tpc_decoder demo_tpc_decoder.cpp LINK_LIBRARIES ${PROJECT_NAME})
+daq_add_application(hdf5_demo_pd_decoder demo_pd_decoder.cpp LINK_LIBRARIES ${PROJECT_NAME})

-#daq_install()
+daq_install()
16 changes: 6 additions & 10 deletions README.md
@@ -6,25 +6,23 @@
```sh
source /cvmfs/dunedaq.opensciencegrid.org/setup_dunedaq.sh

-setup_dbt dunedaq-v2.8.0
+setup_dbt dunedaq-v2.8.1

dbt-workarea-env
```


# Build
```sh
-cd build

-cmake ..

-make -j
+dbt-build.sh
```

# Run (example)

```sh
-./apps/demo ../swtest.hdf5 1
+./apps/hdf5_demo_tpc_decoder [PATH_TO_HDF5_FILE/file.hdf5] [VDColdboxChannelMap|ProtoDUNESP1ChannelMap] [number of events to read]

+./apps/hdf5_demo_pd_decoder [PATH_TO_HDF5_FILE/file.hdf5] [number of events to read]
```

# Some links on DUNE-DAQ
@@ -33,7 +31,5 @@ make -j


# TODO
-- Clean up
-- Add more HDF5 features
-- create a library

+- ERS issues
77 changes: 0 additions & 77 deletions apps/demo.cpp

This file was deleted.

62 changes: 62 additions & 0 deletions apps/demo_pd_decoder.cpp
@@ -0,0 +1,62 @@
/**
* @file demo_pd_decoder.cpp
*
* Demo of HDF5 file reader for PD fragments: this example shows how to extract fragments from a file and decode SSP frames.
*
*
* This is part of the DUNE DAQ Software Suite, copyright 2020.
* Licensing/copyright details are in the COPYING file that you should have
* received with this code.
*/


#include <iostream>
#include <string>
#include <fstream>

#include "logging/Logging.hpp"
#include "hdf5libs/DAQDecoder.hpp"
#include "utils.hpp"

using namespace dunedaq::hdf5libs;


int main(int argc, char** argv){
std::cout << "Starting PD decoder" << std::endl;

// Default number of records to read
int num_trs = 1;
if(argc <2) {
std::cerr << "Usage: tpc_decoder <fully qualified file name> [number of events to read]" << std::endl;
return -1;
}

if(argc == 3) {
num_trs = std::stoi(argv[2]);
std::cout << "Number of events to read: " << num_trs << std::endl;
}


DAQDecoder decoder = DAQDecoder(argv[1], num_trs);

std::vector<std::string> datasets_path = decoder.get_fragments(num_trs);
//std::vector<std::string> datasets_path = decoder.get_trh(num_trs);

std::cout << "Number of fragments: " << datasets_path.size() << std::endl;

// Read all the fragments
int dropped_fragments = 0;
int fragment_counter = 0;
for (auto& element : datasets_path) {
fragment_counter += 1;
std::cout << "Reading fragment " << fragment_counter << "/" << datasets_path.size() << std::endl;
std::cout << "Number of dropped fragments: " << dropped_fragments << std::endl;
ReadSSPFrag(decoder.get_frag_ptr(element), dropped_fragments);
}



std::cout << "Finished parsing all fragments" << std::endl;

return 0;
}
99 changes: 99 additions & 0 deletions apps/demo_tpc_decoder.cpp
@@ -0,0 +1,99 @@
/**
* @file demo_tpc_decoder.cpp
*
* Demo of HDF5 file reader for TPC fragments: this example shows how to extract fragments from a file and decode WIB frames.
*
*
* This is part of the DUNE DAQ Software Suite, copyright 2020.
* Licensing/copyright details are in the COPYING file that you should have
* received with this code.
*/


#include <iostream>
#include <string>
#include <fstream>

#include "logging/Logging.hpp"
#include "hdf5libs/DAQDecoder.hpp"
#include "utils.hpp"

#include "detchannelmaps/TPCChannelMap.hpp"

using namespace dunedaq::hdf5libs;
using namespace dunedaq::detchannelmaps;

int main(int argc, char** argv){
std::cout << "Starting TPC decoder" << std::endl;

// Default number of records to read
int num_trs = 1000000;
if(argc <3) {
std::cerr << "Usage: demo <fully qualified file name> <VDColdboxChannelMap | ProtoDUNESP1ChannelMap> [number of events to read]" << std::endl;
return -1;
}

if(argc == 4) {
num_trs = std::stoi(argv[3]);
std::cout << "Number of events to read: " << num_trs << std::endl;
}


DAQDecoder decoder = DAQDecoder(argv[1], num_trs);

std::vector<std::string> datasets_path = decoder.get_fragments(num_trs);
//std::vector<std::string> datasets_path = decoder.get_trh(num_trs);

std::cout << "Number of fragments: " << datasets_path.size() << std::endl;

// Read all the fragments
int dropped_fragments = 0;
int fragment_counter = 0;

size_t raw_data_packets = (decoder.get_frag_ptr(datasets_path[0])->get_size() - sizeof(dunedaq::daqdataformats::FragmentHeader)) / sizeof(dunedaq::detdataformats::WIBFrame);

std::map<size_t, std::pair<float,float> > offline_map;
std::vector<uint32_t> adc_channels_sums(raw_data_packets,0);
std::shared_ptr<TPCChannelMap> vdcb_map = make_map(argv[2]);

for (auto& element : datasets_path) {
fragment_counter += 1;
std::cout << "Reading fragment " << fragment_counter << "/" << datasets_path.size() << std::endl;
std::cout << "Number of dropped fragments: " << dropped_fragments << std::endl;
ReadWibFrag(decoder.get_frag_ptr(element), vdcb_map, &offline_map, &adc_channels_sums, dropped_fragments);
}


std::ofstream output_file_plane_0("offline_map_mean_stddev_0.txt");
std::ofstream output_file_plane_1("offline_map_mean_stddev_1.txt");
std::ofstream output_file_plane_2("offline_map_mean_stddev_2.txt");
int plane = 0;
for (auto p : offline_map) {
try {
plane = vdcb_map->get_plane_from_offline_channel(p.first);
if(plane == 0) {
output_file_plane_0 << p.first << " " << p.second.first << " " << p.second.second << std::endl;
} else if (plane == 1) {
output_file_plane_1 << p.first << " " << p.second.first << " " << p.second.second << std::endl;
} else {
output_file_plane_2 << p.first << " " << p.second.first << " " << p.second.second << std::endl;
}
}
catch (std::exception & e) {
std::cout << "Offline channel=" << p.first << " " << e.what() << std::endl;
}
}

std::ofstream output_file_2("summed_adcs.txt");
uint64_t ts = 0;
for (size_t i = 0; i < 8192 ; ++i) {
output_file_2 << ts << " " << adc_channels_sums[i] <<std::endl;
ts += 500;
}



std::cout << "Finished parsing all fragments" << std::endl;

return 0;
}
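
Both new demos drive the library through the same three calls: construct a DAQDecoder from a file path and a record count, list the fragment dataset paths with get_fragments(), and fetch each fragment with get_frag_ptr(). The following is a minimal sketch of that pattern assembled from the calls visible in the listings above; the program name, the payload-size report, and the daqdataformats/Fragment.hpp include are illustrative assumptions rather than code from this commit (the actual frame decoding lives in the ReadWibFrag/ReadSSPFrag helpers of utils.hpp, which this page does not show).

```cpp
// Sketch only: mirrors the DAQDecoder access pattern of the two demo apps.
#include <iostream>
#include <string>
#include <vector>

#include "daqdataformats/Fragment.hpp" // assumed include for FragmentHeader
#include "hdf5libs/DAQDecoder.hpp"

using namespace dunedaq::hdf5libs;

int main(int argc, char** argv) {
  if (argc < 2) {
    std::cerr << "Usage: fragment_lister <fully qualified file name> [number of records to read]" << std::endl;
    return -1;
  }

  // Default number of trigger records to read, as in the demos.
  int num_trs = 1;
  if (argc == 3) {
    num_trs = std::stoi(argv[2]);
  }

  // Open the HDF5 file, limiting the scan to num_trs trigger records.
  DAQDecoder decoder(argv[1], num_trs);

  // Dataset paths of all fragments belonging to the requested records.
  std::vector<std::string> fragment_paths = decoder.get_fragments(num_trs);
  std::cout << "Number of fragments: " << fragment_paths.size() << std::endl;

  // Fetch each fragment and print its payload size (total size minus header),
  // the same arithmetic demo_tpc_decoder.cpp uses to count WIB frames.
  for (const auto& path : fragment_paths) {
    auto frag = decoder.get_frag_ptr(path);
    std::cout << path << " payload bytes: "
              << frag->get_size() - sizeof(dunedaq::daqdataformats::FragmentHeader)
              << std::endl;
  }

  std::cout << "Finished listing all fragments" << std::endl;
  return 0;
}
```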