Cycle 23/5 bugfixes and instrument info update (#10)
* Bugfixes for start of cycle 24/5

Fix bug in saving powder instrument info
Update to use new DAaaS data paths
Update MARI background scan

* Fix typo in new raw path; Add norm for Cd runs

* Fix autoreduction bugs

MARI white-beam vanadium runs with the wrong number of spectra
Reducing quiet-counts or white-vanadium runs will now raise an error
Hopefully fixes the "workspace invalidated" error

* Remove MARI sub_ana routines (Cycle 24/2)

* Fix possible loading error of incomplete files

* Fix bug in l2 determination for preloaded workspace

* Instrument info: reps and type fixes

+ Change data type of chopper speed to float32 (see the sketch after this list)
+ Fix energy data type (was int32, should have been float32)
+ Parse "rep_xxx" fields in the raw .nxs file correctly, and omit them from .nxspe files

* Fix monovan bugs and LET .raw loading issue

* Update inst info routines
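A minimal sketch of the data-type change described in the list above, assuming the fields are scalar datasets in an HDF5/NeXus file; the file name, group path and field names below are illustrative only, not the commit's actual code:

import h5py
import numpy as np

with h5py.File('MAR12345.nxspe', 'r+') as spe:              # illustrative file name
    inst = spe['mari_data/instrument']                      # illustrative group path
    for name, value in [('chopper_speed', 400.0), ('energy', 180.0)]:
        if name in inst:
            del inst[name]                                  # drop any old int32 dataset
        # recreate the field with an explicit float32 dtype
        inst.create_dataset(name, (), dtype='float32', data=np.float32(value))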
mducle authored Aug 20, 2024
1 parent 1e0c7fe commit 6127d46
Showing 4 changed files with 106 additions and 49 deletions.
5 changes: 4 additions & 1 deletion reduction_files/DG_monovan.py
@@ -129,11 +129,14 @@
# in the current monitor workspace (post monochromator) is at t=0 and L=0
# note that the offset is not the predicted value due to the energy dependence of the source position

Ei_orig = Ei
if m3spec is not None and not fixei:
(Ei,mon2_peak,_,_) = GetEi(mv_monitors,Monitor1Spec=m2spec,Monitor2Spec=m3spec,EnergyEstimate=Ei)
(Ei,mon2_peak,_,_) = GetEi(mv_monitors,Monitor1Spec=m2spec,Monitor2Spec=m3spec,EnergyEstimate=Ei,FixEi=fixei)
print("... refined Ei=%.2f meV" % Ei)
else:
(Ei,mon2_peak,_,_) = GetEi(mv_monitors,Monitor2Spec=m2spec,EnergyEstimate=Ei,FixEi=fixei)
if np.isnan(Ei):
Ei = Ei_orig
print("... m2 tof=%.2f mus, m2 pos=%.2f m" % (mon2_peak,m2pos))

mv_norm = ScaleX(mv_norm, Factor=-mon2_peak, Operation='Add', InstrumentParameter='DelayTime', Combine=True)
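For reference, a minimal sketch of the fallback pattern in the DG_monovan.py hunk above, assuming a Mantid environment; get_ei_with_fallback is an illustrative helper, not part of the repository:

import numpy as np
from mantid.simpleapi import GetEi

def get_ei_with_fallback(mon_ws, m2spec, ei_estimate, fixei=True):
    # Refine Ei from the monitor peaks; if GetEi cannot do so and returns NaN
    # (e.g. with FixEi and a weak monitor signal), keep the original estimate.
    ei, mon2_peak, _, _ = GetEi(mon_ws, Monitor2Spec=m2spec,
                                EnergyEstimate=ei_estimate, FixEi=fixei)
    if np.isnan(ei):
        ei = ei_estimate
    return ei, mon2_peak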
39 changes: 17 additions & 22 deletions reduction_files/DG_reduction.py
@@ -49,8 +49,6 @@
cs_block = None
cs_block_unit = ''
cs_bin_size = 0
# MARI only: used to subtract an analytic approx of instrument background
sub_ana = False
#========================================================

#==================Absolute Units Inputs=================
@@ -72,7 +70,7 @@
Qbins = 20 # approximate number of Q-bins for QENS
theta_range = [5., 65.] # useful theta range for Q-binning (QENS)
idebug = False # keep workspaces and check absolute units on elastic line
save_dir = f'/instrument/{inst}/RBNumber/USER_RB_FOLDER' # Set to None to avoid resetting
save_dir = f'/data/analysis/{inst}/RBNumber/USER_RB_FOLDER' # Set to None to avoid resetting
psi_motor_name = 'rot' # name of the rotation motor in the logs
angles_workspace = 'angles_ws' # name of workspace to store previously seen angles
sumruns_savemem = False # Compresses event in summed ws to save memory
@@ -111,10 +109,12 @@

cycle_shortform = cycle[2:] if cycle.startswith('20') else cycle
datadir = f'/archive/NDX{inst}/Instrument/data/cycle_{cycle_shortform}/'
datadir2 = f'/data/instrument/{inst}/CYCLE20{cycle_shortform.replace("_","")}/USER_RB_FOLDER/'
instfiles = '/usr/local/mprogs/InstrumentFiles/'
mapdir = f'{instfiles}/{inst.swapcase()}/'
config.appendDataSearchDir(mapdir)
config.appendDataSearchDir(datadir)
config.appendDataSearchDir(datadir2)
ws_list = ADS.getObjectNames()
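A short sketch of the search-path registration used above, assuming a Mantid environment; the folder names are illustrative (the script itself derives them from inst and cycle):

from mantid.kernel import ConfigService

config = ConfigService.Instance()
# Register both the archive and the new DAaaS data folders so Load() can
# resolve bare run numbers from either location.
for folder in ('/archive/NDXMARI/Instrument/data/cycle_24_1/',
               '/data/instrument/MARI/CYCLE20241/USER_RB_FOLDER/'):
    config.appendDataSearchDir(folder)
print(config.getDataSearchDirs())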

# Try to load subsidiary utilities
@@ -140,26 +140,18 @@ def tryload(irun): # loops till data file appears
try:
ws = Load(str(irun),LoadMonitors=True)
except TypeError:
ws = Load(str(irun),LoadMonitors='Separate')
try:
ws = Load(str(irun),LoadMonitors='Separate')
except ValueError: # Possibly when file is being copied over from NDX machine
Pause(2)
ws = Load(str(irun),LoadMonitors=True)
except ValueError:
print(f'...waiting for run #{irun}')
Pause(file_wait)
continue
break
if inst == 'MARI':
remove_extra_spectra_if_mari('ws')
if sub_ana is True and not sample_cd:
if 'bkg_ev' not in mtd:
gen_ana_bkg()
current = mtd['ws'].run().getProtonCharge()
if isinstance(mtd['ws'], mantid.dataobjects.EventWorkspace):
ws = mtd['ws'] - mtd['bkg_ev'] * current
else:
try:
ws = mtd['ws'] - mtd['bkg_his'] * current
except ValueError:
gen_ana_bkg(target_ws=mtd['ws'])
ws = mtd['ws'] - mtd['bkg_his'] * current
return mtd['ws']
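A compact sketch of the polling pattern that tryload implements, assuming Mantid's Load and Pause algorithms; wait_and_load and its defaults are illustrative only:

from mantid.simpleapi import Load, Pause

def wait_and_load(run, wait_seconds=30, max_tries=20):
    # Retry until the raw file appears (or give up), pausing between attempts.
    for _ in range(max_tries):
        try:
            return Load(str(run), LoadMonitors=True)   # may return (data, monitors)
        except ValueError:                             # file not found yet, or still being copied
            print(f'...waiting for run #{run}')
            Pause(wait_seconds)
    raise RuntimeError(f'Run {run} not found after {max_tries} attempts')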

def load_sum(run_list, block_name=None):
@@ -222,11 +214,12 @@ def load_sum(run_list, block_name=None):
# =======================load background runs and sum=========================
if sample_cd is not None:
ws_cd = load_sum(sample_cd)
ws_cd = NormaliseByCurrent(ws_cd)
if sample_bg is not None:
ws_bg = load_sum(sample_bg)
ws_bg = NormaliseByCurrent(ws_bg)
if sample_cd is not None:
ws_bg = ws_bg - ws_cd
ws_bg = NormaliseByCurrent(ws_bg)

# ==========================continuous scan stuff=============================
if cs_block and cs_bin_size > 0:
@@ -265,13 +258,17 @@ def load_sum(run_list, block_name=None):
use_auto_ei = True
try:
Ei_list = autoei(ws)
except NameError:
except (NameError, RuntimeError) as e:
fn = str(sample[0])
if not fn.startswith(inst[:3]): fn = f'{inst[:3]}{fn}'
if fn.endswith('.raw'): fn = fn[:-4]
if fn[-4:-2] == '.s': fn = fn[:-4] + '.n0' + fn[-2:]
if not fn.endswith('.nxs') and fn[-5:-2] != '.n0': fn += '.nxs'
Ei_list = autoei(LoadNexusMonitors(fn, OutputWorkspace='ws_tmp_mons'))
ws_tmp_mons = LoadNexusMonitors(fn, OutputWorkspace='ws_tmp_mons')
Ei_list = autoei(ws_tmp_mons)
if not Ei_list:
raise RuntimeError(f'Invalid run(s) {sample}. Chopper(s) not running ' \
'or could not determine neutron energy')
print(f"Automatically determined Ei's: {Ei_list}")
if len(trans) < len(Ei_list):
print(f'{inst}: WARNING - not enough transmission values for auto-Ei. ' \
@@ -354,7 +351,7 @@ def load_sum(run_list, block_name=None):
# instrument geometry to work out ToF ranges
sampos = ws.getInstrument().getSample().getPos()
l1 = (sampos - ws.getInstrument().getSource().getPos()).norm()
l2 = (ws.getDetector(0).getPos() - sampos).norm()
l2 = (ws.getDetector(ws.getSpectrumNumbers()[0]).getPos() - sampos).norm()

# Updates ei_loop if auto _and_ not using monovan
if use_auto_ei:
@@ -452,8 +449,6 @@ def load_sum(run_list, block_name=None):
ofile_suffix += 'sum'
if sample_bg is not None:
ofile_suffix += '_sub'
if sub_ana:
ofile_suffix += '_sa'

# output nxspe file
ofile = f'{ofile_prefix}_{origEi:g}meV{ofile_suffix}'
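As a cross-check on the l2 fix above (using the first real spectrum rather than detector index 0), Mantid's SpectrumInfo exposes the same flight-path distances directly; a hedged sketch with an illustrative file name, not the script's own approach:

from mantid.simpleapi import Load

ws = Load('MAR12345.nxs')                      # illustrative run file
spec_info = ws.spectrumInfo()
l1 = spec_info.l1()                            # moderator-to-sample distance (m)
# Use the first non-monitor spectrum for the sample-to-detector distance
idx = next(i for i in range(ws.getNumberHistograms()) if not spec_info.isMonitor(i))
l2 = spec_info.l2(idx)
print(f'L1 = {l1:.3f} m, L2 = {l2:.3f} m')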
7 changes: 6 additions & 1 deletion reduction_files/DG_whitevan.py
@@ -33,7 +33,7 @@
wv_lrange = [1,5] # wavelength integration limits for output of vanadium integrals
wv_detrange = [30000,60000] # spectrum index range for average intensity calculation
idebug = False # keep intermediate workspaces for debugging
save_dir = f'/instrument/{inst}/RBNumber/USER_RB_FOLDER' # Set to None to avoid resetting
save_dir = f'/data/analysis/{inst}/RBNumber/USER_RB_FOLDER' # Set to None to avoid resetting
#========================================================
#!end_params

@@ -43,6 +43,8 @@
cycle_shortform = cycle[2:] if cycle.startswith('20') else cycle
data_dir = f'/archive/NDX{inst}/Instrument/data/cycle_{cycle_shortform}/'
config.appendDataSearchDir(data_dir)
data_dir = f'/data/instrument/{inst}/CYCLE20{cycle_shortform.replace("_","")}/RB0/'
config.appendDataSearchDir(data_dir)
if save_dir is not None:
config.appendDataSearchDir(save_dir)

@@ -57,6 +59,9 @@ def try_load_no_mon(irun, ws_name=None):
ws_name = ws_name[0]
try:
return Load(str(irun), LoadMonitors='Exclude', OutputWorkspace=ws_name)
except RuntimeError:
ws_load = Load(str(irun), LoadMonitors='Include', OutputWorkspace=ws_name)
ExtractMonitors(ws_load, DetectorWorkspace=ws_name, MonitorWorkspace=ws_name+'_monitors')
except ValueError:
return Load(str(irun), LoadMonitors=False, OutputWorkspace=ws_name)

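A self-contained sketch of the monitor-handling fallback added to try_load_no_mon above, assuming a Mantid environment; load_without_monitors is an illustrative name, not the repository's function:

from mantid.simpleapi import ExtractMonitors, Load

def load_without_monitors(run, ws_name='wv_ws'):
    try:
        return Load(str(run), LoadMonitors='Exclude', OutputWorkspace=ws_name)
    except RuntimeError:
        # Some loaders cannot exclude monitors: load them inline, then split
        # them out into a separate <ws_name>_monitors workspace.
        bundled = Load(str(run), LoadMonitors='Include', OutputWorkspace=ws_name)
        detectors, monitors = ExtractMonitors(bundled,
                                              DetectorWorkspace=ws_name,
                                              MonitorWorkspace=ws_name + '_monitors')
        return detectors
    except ValueError:
        return Load(str(run), LoadMonitors=False, OutputWorkspace=ws_name)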
104 changes: 79 additions & 25 deletions reduction_files/reduction_utils.py
@@ -110,46 +110,79 @@ def copy_inst_info(outfile, in_ws):
return
if not os.path.exists(outfile):
outfile = os.path.join(mantid.simpleapi.config['defaultsave.directory'], os.path.basename(outfile))
en0 = mtd[in_ws].getEFixed(mtd[in_ws].getDetector(0).getID())
with h5py.File(raw_file_name, 'r') as raw:
exclude = ['dae', 'detector_1', 'name']
to_copy = [k for k in raw['/raw_data_1/instrument'] if not any([x in k for x in exclude])]
to_copy = set([k for k in raw['/raw_data_1/instrument'] if not any([x in k for x in exclude])])
if 'aperture' not in to_copy and 'mono_chopper' not in to_copy:
return
reps = [k for k in to_copy if k.startswith('rep_')]
to_copy = to_copy.difference(reps)
has_fermi = 'fermi' in to_copy
is_main_rep = False
thisrep = None
if has_fermi:
if np.abs(en0 - float(raw['/raw_data_1/instrument/fermi/energy'][()])) < (en0/20):
is_main_rep = True
elif len(reps) == 0:
return # No rep information
fermi_grp = raw['/raw_data_1/instrument/fermi/']
to_copy = to_copy.difference(set(['fermi']))
if not is_main_rep and len(reps) > 0:
thisrep = reps[np.argsort([np.abs(en0 - float(k[4:])) for k in reps])[0]]
if np.abs(en0 - float(thisrep[4:])) > (en0/20):
return # No rep information
rep_copy = [k for k in raw[f'/raw_data_1/instrument/{thisrep}']]
if 'fermi' in rep_copy:
has_fermi = True
fermi_grp = raw[f'/raw_data_1/instrument/{thisrep}/fermi']
rep_copy = rep_copy.difference(['fermi'])
n_spec = len(raw['/raw_data_1/instrument/detector_1/spectrum_index'])
with h5py.File(outfile, 'r+') as spe:
spe_root = list(spe.keys())[0]
en0 = spe[f'{spe_root}/instrument/fermi/energy'][()]
if 'fermi' in to_copy:
del spe[f'{spe_root}/instrument/fermi']
if has_fermi:
fields = set([k for k in fermi_grp]).difference(['energy', 'rotation_speed'])
for fd in fields:
src = fermi_grp[fd]
h5py.Group.copy(src, src, spe[f'{spe_root}/instrument/fermi'])
if 'rotation_speed' in fermi_grp:
spe[f'{spe_root}/instrument/fermi'].create_dataset('rotation_speed', (), dtype='float64')
spe[f'{spe_root}/instrument/fermi/rotation_speed'][()] = fermi_grp['rotation_speed'][()]
for atr in fermi_grp['rotation_speed'].attrs:
spe[f'{spe_root}/instrument/fermi/rotation_speed'].attrs[atr] = fermi_grp['rotation_speed'].attrs[atr]
if not is_main_rep and thisrep is not None:
for grp in rep_copy:
src = raw[f'/raw_data_1/instrument/{thisrep}/{grp}']
h5py.Group.copy(src, src, spe[f'{spe_root}/instrument/'])
to_copy = to_copy.difference([grp])
for grp in to_copy:
src = raw[f'/raw_data_1/instrument/{grp}']
h5py.Group.copy(src, src, spe[f'{spe_root}/instrument/'])
if 'fermi' in to_copy:
spe[f'{spe_root}/instrument/fermi/energy'][()] = en0
detroot = f'{spe_root}/instrument/detector_elements_1'
udet = np.array(raw['/raw_data_1/isis_vms_compat/UDET'])
spe.create_group(detroot)
spe[detroot].attrs['NX_class'] = np.array('NXdetector', dtype='S')
for df0, df1 in zip(['SPEC', 'UDET', 'DELT', 'LEN2', 'CODE', 'TTHE', 'UT01'], \
['det2spec', 'detector_number', 'delt', 'distance', 'detector_code', 'polar_angle', 'azimuthal_angle']):
for df0, df1 in zip(['UDET', 'DELT', 'LEN2', 'CODE', 'TTHE', 'UT01'], \
['detector_number', 'delt', 'distance', 'detector_code', 'polar_angle', 'azimuthal_angle']):
src = raw[f'/raw_data_1/isis_vms_compat/{df0}']
h5py.Group.copy(src, src, spe[detroot], df1)
for nn in range(raw['/raw_data_1/isis_vms_compat/NUSE'][0]):
src = raw[f'/raw_data_1/isis_vms_compat/UT{nn+1:02d}']
h5py.Group.copy(src, src, spe[detroot], f'user_table{nn+1:02d}')
spec2work = f'{spe_root}/instrument/detector_elements_1/spec2work'
ws = mtd[in_ws]
if n_spec == ws.getNumberHistograms():
s2w = np.arange(n_spec)
else:
nmon = np.array(raw['/raw_data_1/isis_vms_compat/NMON'])[0]
spec = np.array(raw['/raw_data_1/isis_vms_compat/SPEC'])[nmon:]
udet = np.array(raw['/raw_data_1/isis_vms_compat/UDET'])[nmon:]
_, iq = np.unique(spec, return_index=True)
s2 = np.hstack([np.array(ws.getSpectrum(ii).getDetectorIDs()) for ii in range(ws.getNumberHistograms())])
_, c1, _ = np.intersect1d(udet[iq], s2, return_indices=True)
s2w = -np.ones(iq.shape, dtype=np.int32)
s2w[c1] = np.array(ws.getIndicesFromDetectorIDs(s2[c1].tolist()))
spe[detroot].create_dataset('spec2work', s2w.shape, dtype='i4', data=s2w)
s2, i2 = [], []
for ii in range(ws.getNumberHistograms()):
d_id = ws.getSpectrum(ii).getDetectorIDs()
s2 += d_id
i2 += [ii+1] * len(d_id)
_, c1, c2 = np.intersect1d(udet, s2, return_indices=True)
d2w = -np.ones(udet.shape, dtype=np.int32)
d2w[c1] = np.array(i2)[c2]
# Loads masks and process
dI = ws.detectorInfo()
iM = np.array([dI.isMasked(ii) for ii in range(len(dI))])
spe[detroot].create_dataset('det2work', d2w.shape, dtype='i4', data=d2w)
spe[detroot].create_dataset('det_mask', iM.shape, dtype=np.bool_, data=iM)
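The detector-to-workspace mapping above can be hard to follow; here is a small self-contained illustration of the np.intersect1d step with made-up detector IDs (not data from any instrument):

import numpy as np

udet = np.array([101, 102, 103, 104, 105])   # detector IDs from the raw file (made up)
spectra_dets = [[101], [103, 104], [999]]    # detector IDs attached to each spectrum (made up)

det_ids, work_idx = [], []
for ii, dets in enumerate(spectra_dets):
    det_ids += dets
    work_idx += [ii + 1] * len(dets)         # 1-based workspace indices, as in the diff

# Match raw-file detector IDs against the workspace's detector IDs and record
# the owning workspace index; unmatched detectors are flagged with -1.
_, in_udet, in_ws = np.intersect1d(udet, det_ids, return_indices=True)
det2work = -np.ones(udet.shape, dtype=np.int32)
det2work[in_udet] = np.array(work_idx)[in_ws]
print(det2work)                              # -> [ 1 -1  2  2 -1]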

#========================================================
# MARI specific functions
@@ -180,7 +213,7 @@ def shift_frame_for_mari_lowE(origEi, wsname='ws_norm', wsmon='ws_monitors'):
ws_norm = ScaleX(wsname, 20000, Operation='Add', IndexMin=0, IndexMax=ws_norm.getNumberHistograms()-1, OutputWorkspace=ws_out)
return ws_norm, ws_monitors

def gen_ana_bkg(quietws='MAR28952', target_ws=None):
def gen_ana_bkg(quietws='MAR29313', target_ws=None):
# Generates an analytic background workspace from the quiet counts data
# by fitting each spectrum with a decaying exponential a*exp(-b*ToF)
if quietws not in mtd:
@@ -223,6 +256,20 @@ def fitfu(x, *pars):
bkg_ev = ConvertToEventWorkspace(wx)
return bkg_ev, wx

def mari_remove_ana_bkg(wsname):
if sub_ana is True and not sample_cd:
if 'bkg_ev' not in mtd:
gen_ana_bkg()
current = mtd[wsname].run().getProtonCharge()
if isinstance(mtd[wsname], mantid.dataobjects.EventWorkspace):
ws = mtd[wsname] - mtd['bkg_ev'] * current
else:
try:
ws = mtd[wsname] - mtd['bkg_his'] * current
except ValueError:
gen_ana_bkg(target_ws=mtd[wsname])
ws = mtd[wsname] - mtd['bkg_his'] * current
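gen_ana_bkg above fits each spectrum with a decaying exponential a*exp(-b*ToF); a standalone sketch of that functional form with SciPy on synthetic data (the script itself fits Mantid workspaces, so this is only an illustration):

import numpy as np
from scipy.optimize import curve_fit

def decaying_exp(tof, a, b):
    return a * np.exp(-b * tof)

# Synthetic quiet-counts spectrum: exponential background plus noise
tof = np.linspace(200.0, 20000.0, 200)                 # time-of-flight (microseconds)
counts = decaying_exp(tof, 5.0, 3e-4) + np.random.normal(0.0, 0.05, tof.size)
(a_fit, b_fit), _ = curve_fit(decaying_exp, tof, counts, p0=[1.0, 1e-4])
print(f'a = {a_fit:.3f}, b = {b_fit:.3e} per microsecond')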

#========================================================
# Auto-Ei routine

@@ -473,19 +520,26 @@ def iliad(runno, ei, wbvan, monovan=None, sam_mass=None, sam_rmm=None, sum_runs=
ReplaceSpecialValues(wv_file, SmallNumberThreshold=1e-20, SmallNumberValue='NaN', OutputWorkspace=wv_file)
except ValueError:
run_whitevan(whitevan=wbvan, **wv_args)
ws = mtd['WV_normalised_integrals']
if ws.getNumberHistograms() == 919:
RemoveSpectra(ws, [0], OutputWorkspace=ws.name())
SaveAscii(ws, wv_file)
Ei_list = ei if hasattr(ei, '__iter__') else [ei]
if 'hard_mask_file' in kwargs:
kwargs['mask'] = kwargs.pop('hard_mask_file')
if 'sumruns' not in kwargs and sum_runs is True:
kwargs['sumruns'] = True
if monovan is not None and isinstance(monovan, (int, float)) and monovan > 0:
mv_file = f'MV_{monovan}.txt'
mvkw = {'mask':kwargs['mask']} if 'mask' in kwargs else {}
if 'inst' in kwargs: mvkw['inst'] = kwargs['inst']
mvkw = {}
for pp in [v for v in ['mask', 'inst'] if v in kwargs]:
mvkw[pp] = kwargs[pp]
try:
LoadAscii(wv_file, OutputWorkspace=wv_file)
LoadAscii(mv_file, OutputWorkspace=mv_file)
ReplaceSpecialValues(mv_file, SmallNumberThreshold=1e-20, SmallNumberValue='NaN', OutputWorkspace=mv_file)
except ValueError:
run_monovan(monovan=monovan, Ei_list=Ei_list, wv_file=wv_file, **mvkw)
kwargs['sample_mass'] = sam_mass
kwargs['sample_fwt'] = sam_rmm
kwargs['mv_file'] = mv_file
run_reduction(sample=runno, Ei_list=ei if hasattr(ei, '__iter__') else [ei], wv_file=wv_file, **kwargs)
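iliad above caches the white-vanadium integrals in an ASCII file; a minimal sketch of that try-load-else-recompute pattern, assuming a Mantid session where the repository's run_whitevan helper is available (the run number and cache file name are illustrative):

from mantid.api import AnalysisDataService as ADS
from mantid.simpleapi import LoadAscii, SaveAscii

wv_run, wv_file = 12345, 'WV_12345.txt'       # illustrative run number and cache file
try:
    # Reuse previously computed white-vanadium integrals if the cache loads
    wv_ws = LoadAscii(wv_file, OutputWorkspace=wv_file)
except ValueError:
    # Otherwise recompute them with the repository's whitevan reduction and cache the result
    run_whitevan(whitevan=wv_run)             # helper assumed to come from the repo's scripts
    wv_ws = ADS.retrieve('WV_normalised_integrals')
    SaveAscii(wv_ws, wv_file)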
