scouting-demonstrator / scdaq / Commits

Commit 1a8c346d
authored 6 years ago by Petr Zejdl
Changing pointers to references

parent 55b9fa80
No related branches found. No related tags found.
1 merge request: !59 "CMSSW json file"
Showing 2 changed files
src/scdaq.cc    20 additions, 19 deletions
src/scdaq.conf   4 additions,  2 deletions
with 24 additions and 21 deletions
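The change is a pass-by-reference refactoring: run_pipeline() now takes config& conf instead of config *conf, so every conf->getX() call becomes conf.getX() and the caller passes conf directly rather than &conf. A minimal sketch of the pattern, assuming a hypothetical Config class (not the project's actual config class):

    #include <iostream>

    // Hypothetical stand-in for scdaq's config class.
    struct Config {
      int num_threads = 8;
      int getNumThreads() const { return num_threads; }
    };

    // Before: pointer parameter; caller writes run_before(&conf), callee uses conf->.
    int run_before(const Config *conf) { return conf->getNumThreads(); }

    // After: reference parameter; caller writes run_after(conf), callee uses conf.
    // A reference cannot be null, so no nullptr check is needed inside.
    int run_after(const Config &conf) { return conf.getNumThreads(); }

    int main() {
      Config conf;
      std::cout << run_before(&conf) << " " << run_after(conf) << std::endl;
      return 0;
    }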
src/scdaq.cc    +20 −19

@@ -39,12 +39,12 @@ using namespace std;
 
 bool silent = false;
 
-int run_pipeline( int nthreads, ctrl& control, config *conf)
+int run_pipeline( int nbThreads, ctrl& control, config& conf)
 {
-  config::InputType input = conf->getInput();
-  size_t MAX_BYTES_PER_INPUT_SLICE = conf->getDmaPacketBufferSize();
-  size_t TOTAL_SLICES = conf->getNumberOfDmaPacketBuffers();
+  config::InputType input = conf.getInput();
+  size_t MAX_BYTES_PER_INPUT_SLICE = conf.getDmaPacketBufferSize();
+  size_t TOTAL_SLICES = conf.getNumberOfDmaPacketBuffers();
 
   // Create empty input reader, will assing later when we know what is the data source
   std::shared_ptr<InputFilter> input_filter;

@@ -54,20 +54,20 @@ int run_pipeline( int nthreads, ctrl& control, config *conf)
   if (input == config::InputType::FILE) {
     // Create file-reading writing stage and add it to the pipeline
-    MAX_BYTES_PER_INPUT_SLICE = 192*conf->getBlocksPerInputBuffer();
-    TOTAL_SLICES = conf->getNumInputBuffers();
+    MAX_BYTES_PER_INPUT_SLICE = 192*conf.getBlocksPerInputBuffer();
+    TOTAL_SLICES = conf.getNumInputBuffers();
-    //input_filter = std::make_shared<FileInputFilter>( conf->getInputFile(), MAX_BYTES_PER_INPUT_SLICE, TOTAL_SLICES );
+    //input_filter = std::make_shared<FileInputFilter>( conf.getInputFile(), MAX_BYTES_PER_INPUT_SLICE, TOTAL_SLICES );
     throw std::runtime_error("input type FILE is temporarily not supported");
 
   } else if (input == config::InputType::DMA) {
     // Create DMA reader
-    //input_filter = std::make_shared<DmaInputFilter>( conf->getDmaDevice(), MAX_BYTES_PER_INPUT_SLICE, TOTAL_SLICES );
+    //input_filter = std::make_shared<DmaInputFilter>( conf.getDmaDevice(), MAX_BYTES_PER_INPUT_SLICE, TOTAL_SLICES );
     throw std::runtime_error("input type DMA is temporarily not supported");
 
   } else if (input == config::InputType::FILEDMA) {
     // Create FILE DMA reader
-    input_filter = std::make_shared<FileDmaInputFilter>( conf->getInputFile(), MAX_BYTES_PER_INPUT_SLICE, TOTAL_SLICES, control );
+    input_filter = std::make_shared<FileDmaInputFilter>( conf.getInputFile(), MAX_BYTES_PER_INPUT_SLICE, TOTAL_SLICES, control );
 
   } else if (input == config::InputType::WZDMA) {
     // Create WZ DMA reader

@@ -87,23 +87,23 @@ int run_pipeline( int nthreads, ctrl& control, config *conf)
   // Create reformatter and add it to the pipeline
   // TODO: Created here so we are not subject of scoping, fix later...
   StreamProcessor stream_processor(MAX_BYTES_PER_INPUT_SLICE);
-  if ( conf->getEnableStreamProcessor() ) {
+  if ( conf.getEnableStreamProcessor() ) {
     pipeline.add_filter( stream_processor );
   }
 
   // Create elastic populator (if requested)
-  std::string url = conf->getElasticUrl();
+  std::string url = conf.getElasticUrl();
   // TODO: Created here so we are not subject of scoping, fix later...
   ElasticProcessor elastic_processor(MAX_BYTES_PER_INPUT_SLICE,
                                      &control,
                                      url,
-                                     conf->getPtCut(),
-                                     conf->getQualCut());
-  if ( conf->getEnableElasticProcessor() ) {
+                                     conf.getPtCut(),
+                                     conf.getQualCut());
+  if ( conf.getEnableElasticProcessor() ) {
     pipeline.add_filter( elastic_processor );
   }
 
-  std::string output_file_base = conf->getOutputFilenameBase();
+  std::string output_file_base = conf.getOutputFilenameBase();
 
   // Create file-writing stage and add it to the pipeline
   OutputStream output_stream( output_file_base.c_str(), control );

@@ -113,7 +113,7 @@ int run_pipeline( int nthreads, ctrl& control, config *conf)
   tbb::tick_count t0 = tbb::tick_count::now();
   // Need more than one token in flight per thread to keep all threads
   // busy; 2-4 works
-  pipeline.run( nthreads * 4 );
+  pipeline.run( nbThreads * 4 );
   tbb::tick_count t1 = tbb::tick_count::now();
 
   if ( !silent ) {

@@ -147,9 +147,10 @@ int main( int argc, char* argv[] ) {
   server s(io_service, conf.getPortNumber(), control);
   boost::thread t(boost::bind(&boost::asio::io_service::run, &io_service));
 
-  int p = conf.getNumThreads();
-  tbb::task_scheduler_init init(p);
-  if (!run_pipeline( p, control, &conf ))
+  int nbThreads = conf.getNumThreads();
+  tbb::task_scheduler_init init(nbThreads);
+
+  if (!run_pipeline( nbThreads, control, conf ))
     return 1;
 
   // utility::report_elapsed_time((tbb::tick_count::now() - mainStartTime).seconds());
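The pipeline.run( nbThreads * 4 ) call above follows the in-code comment: keep a few tokens in flight per thread (2-4 works) so no stage starves the workers. A minimal sketch of the same classic tbb::pipeline pattern, with a toy producer, a parallel processing stage, and a serial output stage (these filters are invented for illustration and are not scdaq's StreamProcessor or OutputStream; the classic tbb::pipeline/task_scheduler_init interface shown here has since been deprecated in oneTBB):

    #include <tbb/pipeline.h>
    #include <tbb/task_scheduler_init.h>
    #include <cstdio>

    // Toy serial input stage: produces 10 integer "slices", then signals end of stream.
    class ProducerFilter : public tbb::filter {
      int count_ = 0;
    public:
      ProducerFilter() : tbb::filter(tbb::filter::serial_in_order) {}
      void* operator()(void*) override {
        if (count_ >= 10) return nullptr;   // nullptr ends the pipeline
        return new int(count_++);           // token handed to the next stage
      }
    };

    // Toy parallel processing stage: squares each slice in place.
    class ProcessFilter : public tbb::filter {
    public:
      ProcessFilter() : tbb::filter(tbb::filter::parallel) {}
      void* operator()(void* item) override {
        int* v = static_cast<int*>(item);
        *v = (*v) * (*v);
        return v;
      }
    };

    // Toy serial output stage: prints and frees each slice.
    class OutputFilter : public tbb::filter {
    public:
      OutputFilter() : tbb::filter(tbb::filter::serial_in_order) {}
      void* operator()(void* item) override {
        int* v = static_cast<int*>(item);
        std::printf("%d\n", *v);
        delete v;
        return nullptr;
      }
    };

    int main() {
      int nbThreads = 4;
      tbb::task_scheduler_init init(nbThreads);

      tbb::pipeline pipeline;
      ProducerFilter producer;
      ProcessFilter processor;
      OutputFilter output;
      pipeline.add_filter(producer);
      pipeline.add_filter(processor);
      pipeline.add_filter(output);

      // As in scdaq: several tokens in flight per thread keep the
      // parallel middle stage busy on every worker.
      pipeline.run(nbThreads * 4);
      return 0;
    }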
src/scdaq.conf    +4 −2

@@ -20,7 +20,7 @@ dma_number_of_packet_buffers:1000
 # Print report each N packets, use 0 to disable
 packets_per_report:5000
-#packets_per_report:10
+#packets_per_report:1
 
 ## Settings for file input
 #input_file:/dev/shm/testdata.bin
@@ -34,12 +34,14 @@ blocks_buffer:1000
 output_filename_base:/fff/BU0/ramdisk/scdaq
 max_file_size:8589934592
-threads:8
 
 # Elastics processor
 port:8000
 elastic_url:http://something.somewhere
 pt_cut:7
 quality_cut:12
 
+# Pipeline settings
+threads:8
 enable_stream_processor:yes
+enable_elastic_processor:no
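scdaq.conf uses plain key:value lines, with '#' marking comments; keys such as threads, port, elastic_url, pt_cut and quality_cut presumably back the getNumThreads(), getPortNumber(), getElasticUrl(), getPtCut() and getQualCut() accessors used in src/scdaq.cc. Purely as an illustration (this is not scdaq's actual config parser), such a file could be read into a string map like this:

    #include <fstream>
    #include <iostream>
    #include <map>
    #include <string>

    // Illustrative reader for "key:value" lines with '#' comments,
    // in the spirit of scdaq.conf; scdaq's real config class is not shown here.
    std::map<std::string, std::string> readConf(const std::string& path) {
      std::map<std::string, std::string> values;
      std::ifstream in(path);
      if (!in) {
        std::cerr << "cannot open " << path << std::endl;
        return values;
      }
      std::string line;
      while (std::getline(in, line)) {
        if (line.empty() || line[0] == '#') continue;   // skip blanks and comments
        std::string::size_type sep = line.find(':');    // first ':' separates key and value
        if (sep == std::string::npos) continue;         // ignore malformed lines
        values[line.substr(0, sep)] = line.substr(sep + 1);
      }
      return values;
    }

    int main() {
      std::map<std::string, std::string> conf = readConf("scdaq.conf");
      std::cout << "threads=" << conf["threads"]
                << " port=" << conf["port"] << std::endl;
      return 0;
    }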