Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

Several bug fixes and minor additions #191

Closed
wants to merge 28 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
28 commits
Select commit Hold shift + click to select a range
8567ebf
Added pid metrics post-processor for evaluating single particle clust…
Mar 2, 2024
baca940
Clean singlep.py, added classification loss functions for imbalanced …
Mar 2, 2024
35b70c8
WIP Restore and Clean CNN and Mixed Encoder
Mar 2, 2024
efea3c8
Added PID Transformers, fixed MixedEncoder, added standalone multipar…
Mar 3, 2024
aa8bd56
Fixed NodeTypeLoss crashing due to Kaon Label
Mar 3, 2024
777582a
Transformer models fixed, trainable
Mar 3, 2024
1c9109f
Fix to fragment_clusts ragged array for container update to np 1.24
Mar 4, 2024
bfbf7c0
WIP Constraint Satisfaction for PID
Mar 6, 2024
d71b317
Constraint Satisfaction Completed, waiting for inference result
Mar 7, 2024
7626aa8
Added satisfiability to logger
Mar 7, 2024
0d37a61
Idiotic mistake in Vertex Constraint
Mar 7, 2024
1b746e3
Remove print statement
Mar 7, 2024
daa0170
CSAT bugfix and update
Mar 12, 2024
d22be34
Transformers fix in normalization and activation layer
Mar 12, 2024
b7bf8dd
Merge with upstream
Apr 29, 2024
f9bd673
Resolve TruthParticle merge conflict
Apr 29, 2024
8564686
Merge with francois develop
Apr 30, 2024
0f0e632
Merge branch 'develop' of https://github.com/francois-drielsma/lartpc…
Apr 30, 2024
4ae1a3f
Added few scripts for track dqdx and shower conversion distance calcu…
May 17, 2024
ecd2479
Added script to only dump interaction csv
May 17, 2024
81e37cf
Added pid metric script and __init__ function update
May 17, 2024
b6aa0c7
Added shower conversion distance cut as post-processor
May 17, 2024
92e61a2
Fixed chain_config issue when trying to run anatools on-the-fly with ML
May 17, 2024
8c1acb9
Fixed bug when handling truth particles with empty predicted nonghost…
May 17, 2024
29e11b0
Merge calibration tools with develop
May 17, 2024
d0223dc
Resolve merge conflict mlreco/utils/calibration
May 17, 2024
d1f190d
Force contiguous and float32 array for Particle.depositions
May 17, 2024
3738da2
Added simple convenience method for printing out neutrino information…
May 17, 2024
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@ __pycache__/
config
analysis/config
mlreco/models/cluster_cnn/deprecated
mlreco/models/experimental/cluster/pointnet2
# C extensions
*.so

Expand Down
15 changes: 14 additions & 1 deletion analysis/classes/Interaction.py
Original file line number Diff line number Diff line change
Expand Up @@ -77,7 +77,8 @@ def __init__(self,
is_fiducial: bool = False,
is_ccrosser: bool = False,
coffset: float = -np.inf,
units: str = 'px', **kwargs):
units: str = 'px',
satisfiability: float = -1., **kwargs):

# Initialize attributes
self.id = int(interaction_id)
Expand Down Expand Up @@ -136,6 +137,9 @@ def __init__(self,
self.crthit_matched = crthit_matched
self.crthit_matched_particle_id = crthit_matched_particle_id
self.crthit_id = crthit_id

# CST quantities
self._satisfiability = satisfiability

@property
def size(self):
Expand Down Expand Up @@ -263,6 +267,7 @@ def _update_particle_info(self):
self._num_particles = np.sum(self._particle_counts)
self._num_primaries = np.sum(self._primary_counts)


@property
def particle_ids(self):
return self._particle_ids
Expand Down Expand Up @@ -342,6 +347,14 @@ def convert_to_cm(self, meta):
@property
def units(self):
return self._units

@property
def satisfiability(self):
return self._satisfiability

@satisfiability.setter
def satisfiability(self, other):
self._satisfiability = other


# ------------------------------Helper Functions---------------------------
Expand Down
2 changes: 1 addition & 1 deletion analysis/classes/Particle.py
Original file line number Diff line number Diff line change
Expand Up @@ -141,7 +141,7 @@ def __init__(self,
self.index = index
self.points = points
self.sources = sources
self.depositions = depositions
self.depositions = np.ascontiguousarray(depositions).astype(np.float32)

self.pdg_code = -1

Expand Down
27 changes: 24 additions & 3 deletions analysis/classes/TruthInteraction.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@
from .Interaction import _process_interaction_attributes

from mlreco.utils import pixel_to_cm
from mlreco.utils.globals import PID_LABELS
from mlreco.utils.globals import PID_LABELS, NU_CURR_TYPE, NU_INT_TYPE
from mlreco.utils.decorators import inherit_docstring

@inherit_docstring(Interaction)
Expand Down Expand Up @@ -106,8 +106,8 @@ def __init__(self,

if self._particles is None:
self._depositions_MeV = depositions_MeV
self._truth_depositions = truth_depositions
self._truth_depositions_MeV = truth_depositions_MeV
self._truth_depositions = truth_depositions.astype(np.float32)
self._truth_depositions_MeV = truth_depositions_MeV.astype(np.float32)
self.truth_points = truth_points
self.truth_index = truth_index

Expand Down Expand Up @@ -341,6 +341,27 @@ def __repr__(self):
def __str__(self):
msg = super(TruthInteraction, self).__str__()
return 'Truth'+msg

@property
def satisfiability(self):
raise ValueError("Satisfiability is a reco quantity and is not defined for TruthInteractions")

def nu_info(self):
'''
Simple printout of the neutrino information (if it exists)
'''

msg = f'''
Neutrino Interaction: {self.nu_pdg_code}
------------------------
Interaction Type : {NU_INT_TYPE[self.nu_interaction_type]}
Interaction Mode : {self.nu_interaction_mode}
Current Type : {NU_CURR_TYPE[self.nu_current_type]}
Nu Initial Energy: {self.nu_energy_init * 1000:.2f} MeV
True Topology : {self.truth_topology}
Visible Topology : {self.topology}
'''
return msg


# ------------------------------Helper Functions---------------------------
Expand Down
10 changes: 5 additions & 5 deletions analysis/classes/TruthParticle.py
Original file line number Diff line number Diff line change
Expand Up @@ -71,16 +71,16 @@ def __init__(self,
self._sed_index = None

# Set attributes
self._depositions_MeV = np.atleast_1d(depositions_MeV)
self._depositions_MeV = np.atleast_1d(depositions_MeV).astype(np.float32)

self.truth_index = truth_index
self._truth_points = truth_points
self._truth_depositions = np.atleast_1d(truth_depositions) # Must be ADC
self._truth_depositions_MeV = np.atleast_1d(truth_depositions_MeV) # Must be MeV
self._truth_points = truth_points.astype(np.float32)
self._truth_depositions = np.atleast_1d(truth_depositions).astype(np.float32) # Must be ADC
self._truth_depositions_MeV = np.atleast_1d(truth_depositions_MeV).astype(np.float32) # Must be MeV

self.sed_index = sed_index
self._sed_points = sed_points
self._sed_depositions_MeV = np.atleast_1d(sed_depositions_MeV)
self._sed_depositions_MeV = np.atleast_1d(sed_depositions_MeV).astype(np.float32)

# Load truth information from the true particle object
self.truth_momentum = np.copy(truth_momentum)
Expand Down
68 changes: 41 additions & 27 deletions analysis/classes/builders.py
Original file line number Diff line number Diff line change
Expand Up @@ -428,6 +428,7 @@ def _build_truth(self,
sed_index = np.where(mask_sed)[0]
else:
mask_sed, sed_index = np.array([]), np.array([])

if np.count_nonzero(mask_nonghost) <= 0:
continue # Skip larcv particles with no true depositions
# 1. Check if current pid is one of the existing group ids
Expand Down Expand Up @@ -515,6 +516,9 @@ def _build_truth(self,
particle.start_point = particle.first_step
if particle.semantic_type == TRACK_SHP:
particle.end_point = particle.last_step

assert particle.points.shape[0] == particle.depositions.shape[0]
assert particle.truth_points.shape[0] == particle.truth_depositions.shape[0]

out.append(particle)

Expand Down Expand Up @@ -729,7 +733,8 @@ def handle_empty_truth_particles(labels_noghost,
entry,
verbose=False,
sed=None,
mask_sed=None):
mask_sed=None,
energy_label=None):
"""
Function for handling true larcv::Particle instances with valid
true nonghost voxels but with no predicted nonghost voxels.
Expand Down Expand Up @@ -757,30 +762,35 @@ def handle_empty_truth_particles(labels_noghost,
coords, depositions, voxel_indices = np.empty((0,3)), np.array([]), np.array([])
coords_noghost, depositions_noghost = np.empty((0,3)), np.array([])
sed_index, sed_points, sed_depositions_MeV = np.array([]), np.empty((0,3)), np.array([])
if np.count_nonzero(mask_noghost) > 0:
if sed is not None:
sed_points = sed[mask_sed][:, COORD_COLS]
sed_index = np.where(mask_sed)[0]
sed_depositions_MeV = sed[mask_sed][:, VALUE_COL]
coords_noghost = labels_noghost[mask_noghost][:, COORD_COLS]
true_voxel_indices = np.where(mask_noghost)[0]
depositions_noghost = labels_noghost[mask_noghost][:, VALUE_COL].squeeze()
truth_labels = get_truth_particle_labels(labels_noghost,
mask_noghost,
id=id,
verbose=verbose)

semantic_type = int(truth_labels[0])
#interaction_id = int(truth_labels[1])
interaction_id = p.interaction_id()
nu_id = int(truth_labels[2])
pid = int(truth_labels[3])
primary_id = int(truth_labels[4])
is_primary = bool(int(primary_id) == 1)

volume_id, cts = np.unique(labels_noghost[:, BATCH_COL][mask_noghost].astype(int),
return_counts=True)
volume_id = int(volume_id[cts.argmax()])

if sed is not None:
sed_points = sed[mask_sed][:, COORD_COLS]
sed_index = np.where(mask_sed)[0]
sed_depositions_MeV = sed[mask_sed][:, VALUE_COL]
coords_noghost = labels_noghost[mask_noghost][:, COORD_COLS]
true_voxel_indices = np.where(mask_noghost)[0]
depositions_noghost = labels_noghost[mask_noghost][:, VALUE_COL].squeeze()
truth_labels = get_truth_particle_labels(labels_noghost,
mask_noghost,
id=id,
verbose=verbose)

semantic_type = int(truth_labels[0])
#interaction_id = int(truth_labels[1])
interaction_id = p.interaction_id()
nu_id = int(truth_labels[2])
pid = int(truth_labels[3])
primary_id = int(truth_labels[4])
is_primary = bool(int(primary_id) == 1)

volume_id, cts = np.unique(labels_noghost[:, BATCH_COL][mask_noghost].astype(int),
return_counts=True)
volume_id = int(volume_id[cts.argmax()])

if energy_label is not None:
truth_depositions_MeV = energy_label[mask_noghost][:, VALUE_COL]
else:
truth_depositions_MeV = np.empty(0, dtype=np.float32)

particle = TruthParticle(group_id=id,
interaction_id=interaction_id,
Expand All @@ -795,15 +805,19 @@ def handle_empty_truth_particles(labels_noghost,
depositions_MeV=np.empty(0, dtype=np.float32),
truth_index=true_voxel_indices,
truth_points=coords_noghost,
truth_depositions=np.empty(0, dtype=np.float32), #TODO
truth_depositions_MeV=depositions_noghost,
truth_depositions=depositions_noghost,
truth_depositions_MeV=truth_depositions_MeV,
is_primary=is_primary,
sed_index=sed_index.astype(np.int64),
sed_points=sed_points.astype(np.float32),
sed_depositions_MeV=sed_depositions_MeV.astype(np.float32),
particle_asis=p,
start_point=-np.ones(3, dtype=np.float32),
end_point=-np.ones(3, dtype=np.float32))

assert particle.truth_points.shape[0] == particle.truth_depositions.shape[0]
assert particle.truth_points.shape[0] > 0

return particle


Expand Down
8 changes: 4 additions & 4 deletions analysis/manager.py
Original file line number Diff line number Diff line change
Expand Up @@ -149,9 +149,9 @@ def initialize_base(self,
# Load the full chain configuration, if it is provided
self.chain_config = chain_config
if chain_config is not None:
#cfg = yaml.safe_load(open(chain_config, 'r').read())
process_config(chain_config, verbose=False)
self.chain_config = chain_config
cfg = yaml.safe_load(open(chain_config, 'r').read())
process_config(cfg, verbose=False)
self.chain_config = cfg

# Initialize data product builders
self.builders = {}
Expand Down Expand Up @@ -628,7 +628,7 @@ def write(self, ana_output):
self.csv_writers = {}

for script_name, fname_to_update_list in ana_output.items():
append = self.scripts[script_name]['logger'].get('append', False)
append = self.scripts[script_name].get('append', False)
filenames = list(fname_to_update_list.keys())
if len(filenames) != len(set(filenames)):
msg = f'Duplicate filenames: {str(filenames)} in {script_name} '\
Expand Down
17 changes: 17 additions & 0 deletions analysis/post_processing/csp/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
# Constraint Satisfaction for PID and Primary Prediction

## I. Usage

A *constraint* $C$ on some variable $X$ limits the possible values that $X$ can
assume in its domain. For example, suppose we have a `Particle` instance `emshower` that has `semantic_type == 1`:
```python
print(emshower.semantic_type)
1
```
Let's make a constraint `ParticleSemanticConstraint`.

Usually, we want to restrict a Particle's type and primary label based on heuristics that are well-grounded in physics.

```

```
1 change: 1 addition & 0 deletions analysis/post_processing/csp/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
from .csat_processor import CSATProcessor
Loading
Loading