Draft version of the desync monitor #728

Open · wants to merge 18 commits into base: v1.x-dev
7 changes: 6 additions & 1 deletion CMakeLists.txt
@@ -78,7 +78,7 @@ ENDIF()

# include directories
#INCLUDE_DIRECTORIES( ./main/include )
-INCLUDE_DIRECTORIES( ./extern/jsoncons-0.110.0/include)
+INCLUDE_DIRECTORIES( ./extern/jsoncons-0.171.1/include)

# additional packages to be searched for by cmake
LIST( APPEND CMAKE_MODULE_PATH ${PROJECT_SOURCE_DIR}/cmake )
@@ -192,6 +192,11 @@ IF(BUILD_runsplitter)
add_subdirectory(etc/tools/runsplitter)
ENDIF()

option(BUILD_desynccorr "Compile desynccorr executable?" OFF)
IF(BUILD_desynccorr)
add_subdirectory(etc/tools/desynccorr)
ENDIF()



###############################################
16 changes: 16 additions & 0 deletions etc/tools/desynccorr/CMakeLists.txt
@@ -0,0 +1,16 @@
find_package(ROOT REQUIRED COMPONENTS Core Hist)
INCLUDE_DIRECTORIES(${ROOT_INCLUDE_DIRS})

INCLUDE_DIRECTORIES(include)

ADD_EXECUTABLE("desynccorr" src/desynccorr.cxx)
ADD_EXECUTABLE("yarr_resync" src/yarr_resync.cxx)

target_link_libraries("yarr_resync" EUDAQ)
target_link_libraries("desynccorr" EUDAQ ROOT::Core ROOT::Hist)

SET(name "desynccorr" "yarr_resync")
INSTALL(TARGETS ${name}
RUNTIME DESTINATION bin COMPONENT MAIN_EXE
LIBRARY DESTINATION lib COMPONENT MAIN_EXE
ARCHIVE DESTINATION lib COMPONENT MAIN_EXE)
92 changes: 92 additions & 0 deletions etc/tools/desynccorr/include/clustering.h
@@ -0,0 +1,92 @@
#ifndef EUDAQ_INCLUDED_Desynccorr_Clustering
#define EUDAQ_INCLUDED_Desynccorr_Clustering

#include <vector>

struct pixel_hit {
int x;
int y;
pixel_hit(int x, int y):
x(x), y(y) {}
pixel_hit() = delete;
};

struct cluster {
double x;
double y;
std::vector<pixel_hit> pixel_hits;
cluster(double x, double y) : x(x), y(y) {}
cluster(double x, double y, std::vector<pixel_hit> &&pixel_hits) : x(x), y(y), pixel_hits(std::move(pixel_hits)) {}
cluster() = delete;
};

//poor man's (woman's, person's?) clustering
inline std::vector<cluster> clusterHits(std::vector<pixel_hit> const & hits, int spatCutSqrd){
//single pixel_hit events are easy, we just get one single-hit cluster
if(hits.size() == 1){
std::vector<cluster> result;
auto const & pix = hits[0];
//For single pixel clusters the cluster position is in the centre of the pixel
//Since we start counting at pixel 1 (not at 0), this is shifted by -0.5 px //check if this holds in EUDAQ, this was for something else
result.emplace_back(pix.x-0.5, pix.y-0.5);
result.back().pixel_hits = hits;
return result;
//multi pixel_hit events are more complicated
} else {
std::vector<pixel_hit> hitPixelVec = hits;
std::vector<pixel_hit> newlyAdded;
std::vector<cluster> clusters;

while( !hitPixelVec.empty() ) {
//this is just a placeholder cluster so far, we will add hits and do the CoG computation later
clusters.emplace_back(-1.,-1);
newlyAdded.push_back( hitPixelVec.front() );
clusters.back().pixel_hits.push_back( hitPixelVec.front() );
hitPixelVec.erase( hitPixelVec.begin() );

while( !newlyAdded.empty() ) {
bool newlyDone = true;
int x1, x2, y1, y2, dX, dY;

for( auto candidate = hitPixelVec.begin(); candidate != hitPixelVec.end(); ++candidate ){
//get the relevant infos from the newly added pixel
x1 = newlyAdded.front().x;
y1 = newlyAdded.front().y;

//and the pixel we test against
x2 = candidate->x;
y2 = candidate->y;
dX = x1-x2;
dY = y1-y2;
int spatDistSqrd = dX*dX+dY*dY;

if( spatDistSqrd <= spatCutSqrd ) {
newlyAdded.push_back( *candidate );
clusters.back().pixel_hits.push_back( *candidate );
hitPixelVec.erase( candidate );
newlyDone = false;
break;
}
}
if(newlyDone) {
newlyAdded.erase(newlyAdded.begin());
}
}
}
//do the CoG cluster computation
for(auto& c: clusters) {
float x_sum = 0;
float y_sum = 0;
for(auto const & h: c.pixel_hits) {
x_sum += h.x;
y_sum += h.y;
}
c.x = x_sum/c.pixel_hits.size()-0.5;
c.y = y_sum/c.pixel_hits.size()-0.5;
}

return clusters;
}
}

#endif // EUDAQ_INCLUDED_Desynccorr_Clustering
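
As a point of reference for reviewers, a minimal usage sketch for this header: it clusters a few hand-written hits with a squared spatial cut of 2, so the three touching pixels end up in one cluster and the isolated pixel in another. The coordinates, the cut value and the include path are illustrative assumptions, not part of this PR.

#include <iostream>
#include <vector>

#include "clustering.h"

int main() {
  //three touching pixels and one isolated pixel (illustrative values)
  std::vector<pixel_hit> hits = { {10, 10}, {11, 10}, {11, 11}, {40, 40} };

  //squared spatial cut of 2: diagonal neighbours still merge
  auto clusters = clusterHits(hits, 2);

  for(auto const & c : clusters) {
    std::cout << "cluster at (" << c.x << ", " << c.y << ") with "
              << c.pixel_hits.size() << " pixel(s)\n";
  }
  return 0;
}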
123 changes: 123 additions & 0 deletions etc/tools/desynccorr/include/syncobject.h
@@ -0,0 +1,123 @@
#ifndef EUDAQ_INCLUDED_Desynccorr_SyncObject
#define EUDAQ_INCLUDED_Desynccorr_SyncObject

#include "jsoncons/json.hpp"

template <typename T>
class overflowbuffer {
public:
overflowbuffer(std::size_t N, T t = T()): _depth(N) {
_c.resize(2*_depth+1, t);
}
void push(T& v) {
_c.pop_front();
_c.emplace_back(v);
}
T & get(int index) {
return _c.at(_depth+index);
}

private:
std::deque<T> _c;
std::size_t _depth;
};

struct plane_sync {

plane_sync(int run_number, int nevts_per_bin, int max_shift): _run_number(run_number), _nevts_per_bin(nevts_per_bin), _max_shift(max_shift) {}

plane_sync(std::string fname) {
std::ifstream is(fname);
jsoncons::json j = jsoncons::json::parse(is);
_nevts_per_bin = j["nevts_per_bin"].as<int>();
_run_number = j["run_number"].as<int>();
_max_shift = j["max_shift"].as<int>();
_good_blocks = j["good_blocks"].as<std::vector<bool>>();
_block_size = _good_blocks.size();
jsoncons::json jp = j["plane_shifts"];
for(const auto& p : jp.object_range()) {
auto key = std::stoi(std::string{p.key()});
auto val = p.value().as<std::vector<int>>();
_desync_data[key] = val;
if(!_has_data) {
_has_data = true;
}
if( (_block_size != val.size()) || val.size() < 3) {
throw std::runtime_error("Adding plane "+std::to_string(key)+" failed because of wrong size. Passed size is : "
+std::to_string(val.size())+" and expected size is larger than 3 and/or "+std::to_string(_block_size));
}
}
}

int get_resync_value(int plane, int evt) const {
if(_desync_data.find(plane) == _desync_data.end()) {
return 0;
} else {
return _desync_data.at(plane).at(evt/_nevts_per_bin);
}
}

bool is_good_evt(int evt) const {
return _good_blocks.at(evt/_nevts_per_bin);
}

void add_plane(int plane, std::vector<int> const & v) {
if( (_has_data && _block_size != v.size()) || v.size() < 3) {
throw std::runtime_error("Adding plane "+std::to_string(plane)+" failed because of wrong size. Passed size is : "
+std::to_string(v.size())+" and expected size is larger than 3 and/or "+std::to_string(_block_size));
return;
} else if(!_has_data) {
_has_data = true;
_block_size = v.size();
_good_blocks.resize(v.size(), true);
_good_blocks[0] = false;
}
_desync_data[plane] = v;
auto current_shift = v[1];
for(std::size_t ix = 2; ix < v.size(); ix++) {
if( (v[ix] != current_shift) || (v[ix] == -_max_shift) ) {
_good_blocks[ix-1] = false;
_good_blocks[ix] = false;
current_shift = v[ix];
}
}
}

void write_json(std::string const & outpath) const {
jsoncons::json j;
jsoncons::json jplane;
j["run_number"] = _run_number;
j["max_shift"] = _max_shift;
j["nevts_per_bin"] = _nevts_per_bin;
for(auto const & [id, v]: _desync_data) {
jplane[std::to_string(id)] = jsoncons::json{v};
}
j["plane_shifts"] = jplane;
j["good_blocks"] = jsoncons::json{_good_blocks};
auto ofile = std::ofstream(outpath, std::ios::out | std::ios::trunc);
ofile << jsoncons::pretty_print(j);
}

int good_events() const {
return _nevts_per_bin*(_block_size-1);
}

int run_number() const {
return _run_number;
}

int max_shift() const {
return _max_shift;
}

private:
std::vector<bool> _good_blocks;
std::map<int, std::vector<int>> _desync_data;
int _nevts_per_bin = 0;
int _run_number = 0;
int _max_shift = 0;
bool _has_data = false;
std::size_t _block_size = 0;
};

#endif // EUDAQ_INCLUDED_Desynccorr_SyncObject
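
To make the intended workflow of this header easier to review, a minimal sketch of the round trip: fill per-bin shifts for one plane, write the JSON sync file, read it back and query the shift for an event; a short overflowbuffer demo follows. The run number, bin size, shift values, plane id and file name are illustrative assumptions, not part of this PR.

#include <iostream>
#include <vector>

#include "syncobject.h"

int main() {
  //run 123, 1000 events per shift bin, maximum tested shift of 5 events
  plane_sync sync(123, 1000, 5);

  //per-bin shifts for plane 1: the shift changes from 0 to 1 at bin 2,
  //so bins 1 and 2 (and always bin 0) are flagged as not usable
  sync.add_plane(1, {0, 0, 1, 1, 1});

  //write the sync file and read it back
  sync.write_json("run000123_sync.json");
  plane_sync reloaded("run000123_sync.json");

  //event 3500 falls into bin 3: shift of 1 and a usable block
  std::cout << "shift:  " << reloaded.get_resync_value(1, 3500) << "\n"
            << "usable: " << reloaded.is_good_evt(3500) << "\n";

  //overflowbuffer keeps a window of 2*N+1 entries so the element at
  //offset 0 can be compared against entries up to N before/after it
  overflowbuffer<int> window(2, 0);
  for(int v : {1, 2, 3, 4, 5}) window.push(v);
  std::cout << "window: " << window.get(-2) << " " << window.get(0)
            << " " << window.get(2) << "\n"; //prints 1 3 5

  return 0;
}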