Traceback (most recent call last):
File "/global/cfs/cdirs/lcls/ihchang/Software/Jun10psana/spinifel/setup/conda/envs/myenv/lib/python3.8/runpy.py", line 194, in _run_module_as_main
return _run_code(code, main_globals, None,
File "/global/cfs/cdirs/lcls/ihchang/Software/Jun10psana/spinifel/setup/conda/envs/myenv/lib/python3.8/runpy.py", line 87, in _run_code
exec(code, run_globals)
File "/global/cfs/cdirs/lcls/ihchang/Software/Jun10psana/spinifel/spinifel/__main__.py", line 24, in <module>
main()
File "/global/cfs/cdirs/lcls/ihchang/Software/Jun10psana/spinifel/setup/conda/envs/myenv/lib/python3.8/site-packages/PyNVTX/__init__.py", line 33, in wrapper
ret = func(*args, **kwargs)
File "/global/cfs/cdirs/lcls/ihchang/Software/Jun10psana/spinifel/spinifel/mpi/main.py", line 89, in main
ds = DataSource(exp=settings.ps_exp, run=settings.ps_runnum,
File "/global/cfs/cdirs/lcls/ihchang/Software/Jun10psana/spinifel/setup/lcls2/psana/psana/datasource.py", line 85, in DataSource
return MPIDataSource(comms, *args, **kwargs)
File "/global/cfs/cdirs/lcls/ihchang/Software/Jun10psana/spinifel/setup/lcls2/psana/psana/psexp/mpi_ds.py", line 126, in __init__
self._setup_run()
File "/global/cfs/cdirs/lcls/ihchang/Software/Jun10psana/spinifel/setup/lcls2/psana/psana/psexp/mpi_ds.py", line 184, in _setup_run
self._setup_configs()
File "/global/cfs/cdirs/lcls/ihchang/Software/Jun10psana/spinifel/setup/lcls2/psana/psana/psexp/mpi_ds.py", line 143, in _setup_configs
self._configs = self.smdr_man.get_next_dgrams()
File "/global/cfs/cdirs/lcls/ihchang/Software/Jun10psana/spinifel/setup/lcls2/psana/psana/psexp/smdreader_manager.py", line 120, in get_next_dgrams
self._get()
File "/global/cfs/cdirs/lcls/ihchang/Software/Jun10psana/spinifel/setup/lcls2/psana/psana/psexp/smdreader_manager.py", line 105, in _get
raise ValueError(msg)
ValueError: SmdReader found dgram (16777388 MB) larger than chunksize (16.777216 MB)
--------------------------------------------------------------------------
MPI_ABORT was invoked on rank 0 in communicator MPI_COMM_WORLD
with errorcode 1.
NOTE: invoking MPI_ABORT causes Open MPI to kill all MPI processes.
You may or may not see output from other processes, depending on
exactly when Open MPI kills them.
--------------------------------------------------------------------------
srun: Job step aborted: Waiting up to 32 seconds for job step to finish.
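
The ValueError comes from psana2's SmdReader: a smalldata datagram did not fit in the reader's chunk buffer, whose 16.777216 MB default corresponds to 0x1000000 bytes. Below is a minimal workaround sketch, assuming this lcls2/psana2 build reads the chunk size from the PS_SMD_CHUNKSIZE environment variable (an assumption to verify against smdreader_manager.py) and using placeholder experiment/run values in place of settings.ps_exp and settings.ps_runnum:

    import os

    # Assumption: PS_SMD_CHUNKSIZE (in bytes) sets the SmdReader chunk buffer;
    # 0x1000000 (16,777,216 bytes) matches the 16.777216 MB default in the error.
    # Enlarge it before constructing the DataSource so the oversized dgram fits.
    os.environ["PS_SMD_CHUNKSIZE"] = str(0x10000000)  # 256 MiB, hypothetical value

    from psana import DataSource

    # Placeholder exp/run; the real job passes settings.ps_exp / settings.ps_runnum.
    ds = DataSource(exp="xpptut15", run=1)
    for run in ds.runs():
        for evt in run.events():
            pass  # event loop unchanged; only the chunk buffer size differs

If the variable is honored by this build, exporting it in the batch script before srun (export PS_SMD_CHUNKSIZE=268435456) achieves the same effect without modifying the spinifel source.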