Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Added z coordinates to hits dictionary. #2

Open
wants to merge 4 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
69 changes: 36 additions & 33 deletions event-display/evd_lib.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,47 +15,47 @@
class NonThread(object):
    """Drop-in stand-in for ``threading.Thread`` that runs synchronously.

    Exposes the same minimal interface (``start``/``join``/``is_alive``)
    but executes ``target`` immediately in the calling thread when
    ``start()`` is invoked.
    """

    def __init__(self, target):
        # Callable to invoke when start() is called.
        self.target = target

    def start(self):
        """Run the target callable immediately (no new thread)."""
        self.target()

    def join(self):
        """No-op: the target already finished inside start()."""
        pass

    def is_alive(self):
        """Always False: execution completes before this can be asked."""
        return False

class keydefaultdict(defaultdict):
    """A ``defaultdict`` whose factory receives the missing key.

    Unlike ``collections.defaultdict`` (whose ``default_factory`` takes no
    arguments), the factory here is called as ``default_factory(key)``, so
    the default value can depend on the key being looked up.
    """

    def __missing__(self, key):
        # Build the default from the key, cache it, and return it.
        # (Removed a duplicated, unreachable `return rv` left by a bad merge.)
        rv = self[key] = self.default_factory(key)
        return rv

class ExternalTriggerFinder(object):
'''
A class to extract external triggers from packet arrays

This class has two parameters: `pacman_trigger_enabled` and `larpix_trigger_channels`

The parameter `pacman_trigger_enabled` configures the `ExternalTriggerFinder` to
extract packets of `packet_type == 7` as external triggers

The parameter `larpix_trigger_channels` configures the `ExternalTriggerFinder` to
extract triggers on particular larpix channels as external triggers. To specify,
this parameter should be a dict of `<chip-key>: [<channel id>]` pairs. A special
chip key of `'All'` can be used in the event that all triggers on a particular
channel of any chip key should be extracted as external triggers.

You can access and set the parameters at initialization::

etf = ExternalTriggerFinder(pacman_trigger_enabled=True, larpix_trigger_channels=dict())

or via the getter/setters::

etf.get_parameters() # dict(pacman_trigger_enabled=True, larpix_trigger_channels=dict())
etf.get_parameters('pacman_trigger_enabled') # dict(pacman_trigger_enabled=True)

etf.set_parameters(pacman_trigger_enabled=True, larpix_trigger_channels={'1-1-1':[0]})

'''
def __init__(self, pacman_trigger_enabled=True, larpix_trigger_channels=None):
if larpix_trigger_channels is None:
Expand All @@ -78,12 +78,12 @@ def set_parameters(self, **kwargs):
def fit(self, events, metadata=None):
'''
Pull external triggers from hit data within each event. No metadata is used.
Trigger types are inherited from the pacman trigger type bits (with

Trigger types are inherited from the pacman trigger type bits (with
`pacman_trigger_enabled`) or are given a value of `-1` for larpix external triggers.

:returns: a list of a list of dicts (one list for each event), each dict describes a single external trigger with the following keys: `ts`-trigger timestamp, `type`-trigger type, `mask`-mask for which packets within the event are included in the trigger

'''
if metadata is None:
metadata = list()
Expand Down Expand Up @@ -138,18 +138,18 @@ def fit(self, events, metadata=None):
class TrackFitter(object):
'''
A class to extract tracks from packet arrays

You can access and set the parameters at initialization::

tf = TrackFitter(vd=1.648, clock_period=0.1, ...)

or via the getter/setters::

tf.get_parameters() # dict(vd=1.648, clock_period=0.1, ...)
tf.get_parameters('vd') # dict(vd=1.648)

tf.set_parameters(vd=1.7)

'''
def __init__(self, dbscan_eps=14, dbscan_min_samples=5, vd=1.648, clock_period=0.1,
ransac_min_samples=2, ransac_residual_threshold=8, ransac_max_trials=100):
Expand Down Expand Up @@ -179,10 +179,10 @@ def set_parameters(self, **kwargs):

vd: drift velocity [mm/us]
clock_period: clock period for timestamp [us]

dbscan_eps: epsilon used for clustering [mm]
dbscan_min_samples: min samples used for clustering

ransac_min_samples: min samples used for outlier detection
ransac_residual_threshold: residual threshold used for outlier detection [mm]
ransac_max_trials: max trials used for outlier detection
Expand Down Expand Up @@ -241,14 +241,14 @@ def _do_pca(self,xyz,mask):
def fit(self, event, metadata=None, plot=False):
'''
Extract tracks from a given event packet array

Accepts geometry metadata and external trigger metadata (optional).
Geometry should be specified as a dict of `(chip_id, channel_id)` pairs,
and external triggers should be specified with a list of dicts containing
`type` and `ts` keys.

:returns: list of dicts (one for each track) containing keys: `track_id`-unique id within event, `mask`-mask for which packets are included in track, `centroid`-x,y,z centroid of track relative to `t0`, `axis`-track x,y,z axis, `residual`-x,y,z residuals, `length`-track length, `start`-x,y,z,t of track start point, `end`-x,y,z,t of track end point, `t0`-t0 timestamp used for track

'''
if metadata is None:
metadata = dict()
Expand Down Expand Up @@ -338,7 +338,7 @@ class LArPixEVDFile(object):
'info' : None,
'hits' : [
('hid', 'i8'),
('px', 'f8'), ('py', 'f8'), ('ts', 'i8'), ('q', 'f8'),
('px', 'f8'), ('py', 'f8'), ('pz', 'f8'), ('ts', 'i8'), ('q', 'f8'),
('iochain', 'i8'), ('chipid', 'i8'), ('channelid', 'i8'),
('geom', 'i8'), ('event_ref', region_ref), ('q_raw', 'f8')
],
Expand Down Expand Up @@ -413,7 +413,7 @@ def __init__(self, filename, source_file=None, configuration_file=None, geometry
with open(self.configuration_file,'r') as infile:
for key,value in json.load(infile).items():
self.configuration[key] = value

# electron lifetime lookup
self.electron_lifetime_f = lambda unix,ts: 1.
self.electron_lifetime_file = electron_lifetime_file
Expand All @@ -428,7 +428,7 @@ def __init__(self, filename, source_file=None, configuration_file=None, geometry
infile.Close()
else:
self.electron_lifetime_file = None

self.source_file = source_file

fitter_config = fitter_config if fitter_config else dict()
Expand Down Expand Up @@ -557,7 +557,7 @@ def _parse_events_array(self):
events_dict['q_raw'] = np.array([0.])
if len(event) and len(trigs):
events_dict['ts_start'] = np.array([min(event[0]['timestamp'],trigs[0]['ts'])])
events_dict['ts_end'] = np.array([min(event[-1]['timestamp'],trigs[-1]['ts'])])
events_dict['ts_end'] = np.array([max(event[-1]['timestamp'],trigs[-1]['ts'])])
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Thanks for catching this!

elif len(event):
events_dict['ts_start'] = np.array([event[0]['timestamp']])
events_dict['ts_end'] = np.array([event[-1]['timestamp']])
Expand Down Expand Up @@ -588,6 +588,9 @@ def _parse_events_array(self):
ped = np.array([self.pedestal[unique_id]['pedestal_mv'] for unique_id in hit_uniqueid_str])
hits_dict['px'] = xy[:,0]
hits_dict['py'] = xy[:,1]
VD = float(str(self.track_fitter.get_parameters('vd')).split(':')[1][:-1])
Copy link
Member

@peter-madigan peter-madigan Dec 9, 2020

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I'm hesitant to apply the drift velocity here, since it then makes it so that a drift velocity must always be required to run the evd script (even if you don't want to do the track reconstruction or the drift velocity is zero). It also is more complicated when you have an external trigger, since the z location should be relative to this timestamp. I figured that it was better to not implement the pz variable in the file and let the analysis do that. Something like this:

hit_ref = f['events'][0]['hit_ref']
ext_trig_ref = f['events'][0]['ext_trig_ref'] if f['events'][0]['n_ext_trigs'] > 0 else None
z_scale = f['tracks'].attrs['z_scale']
if ext_trig_ref is not None:
    pz = (f['hits'][hit_ref]['ts'] - f['ext_trigs'][ext_trig_ref][0]['ts']) * z_scale

But... if you're willing to add some logic to this to check for

  • are you using a track fitter? -> if not, don't fill pz
  • are there external triggers in the event? -> if not, don't fill pz

and then do the timestamp relative to the external trigger then I will merge it in.

hits_dict['pz'] = (event['timestamp']-events_dict['ts_start'])*VD
# event['timestamp'] = hit's timestamp; events_dict['ts_start'] = event's start timestamp; VD = drift velocity [mm/us]
q_raw = event['dataword']/256. * (vref-vcm) + vcm - ped
hits_dict['q_raw'] = q_raw
hits_dict['q'] = q_raw
Expand Down
6 changes: 3 additions & 3 deletions event-display/to_evd_file.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@ def split_at_timestamp(timestamp,event,*args):
Breaks event into two arrays at index where event['timestamp'] > timestamp
Additional arrays can be specified with kwargs and will be split at the same
index

:returns: tuple of two event halves followed by any additional arrays (in pairs)
'''
args = list(args)
Expand Down Expand Up @@ -132,7 +132,7 @@ def main(in_filename, out_filename, *args,
# load a buffer of data
packet_buffer = np.copy(packets[mask][start_idx:min(end_idx,n_packets)])
packet_buffer = np.insert(packet_buffer, [0], last_unix_ts)

# find unix timestamp groups
ts_mask = packet_buffer['packet_type'] == 4
ts_grps = np.split(packet_buffer, np.argwhere(ts_mask).flatten())
Expand All @@ -155,7 +155,7 @@ def main(in_filename, out_filename, *args,
event_idx = np.argwhere(np.abs(packet_dt) > event_dt).flatten()
events = np.split(packet_buffer, event_idx)
event_unix_ts = np.split(unix_ts, event_idx)

for idx, event, unix_ts in zip(event_idx, events[:-1], event_unix_ts[:-1]):
if idx == 0:
while len(event_buffer) >= nhit_cut:
Expand Down