
Commit

minor tweaks to testing
mattcleigh committed Mar 8, 2023
1 parent 3bfdc83 commit 009362a
Showing 8 changed files with 16 additions and 13 deletions.
2 changes: 2 additions & 0 deletions .gitignore
@@ -91,6 +91,8 @@ ipython_config.py
# install all needed dependencies.
#Pipfile.lock

+.vscode*

# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/

Empty file added .project-root
Empty file.
4 changes: 2 additions & 2 deletions configs/datamodule/default.yaml
@@ -7,7 +7,7 @@ data_conf:
jet_type: t
data_dir: ${paths.data_dir}
num_particles: 30
-split_fraction: [0.7, 0.0, 0.3]
+split_fraction: [0.9, 0.0, 0.1]
particle_features:
- etarel
- phirel
@@ -20,5 +20,5 @@ data_conf:
loader_kwargs:
pin_memory: true
batch_size: 256
-num_workers: 2
+num_workers: 16
drop_last: True
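
The split fraction moves from 0.7/0.0/0.3 to 0.9/0.0/0.1 and num_workers from 2 to 16, so more data goes to training and the loaders are fed by more worker processes. A minimal sketch of how a data_conf / loader_kwargs pair like this is typically consumed; the repo's actual datamodule is not part of this diff, so the dataset and split logic below are placeholders:

```python
# Sketch only: placeholder dataset and split logic, not the repo's datamodule.
import torch
from torch.utils.data import DataLoader, TensorDataset, random_split

def build_loaders(data_conf: dict, loader_kwargs: dict):
    # Placeholder dataset shaped (num_jets, num_particles, num_features).
    full = TensorDataset(torch.randn(1000, data_conf["num_particles"], 3))

    # split_fraction is [train, val, test], as in the YAML above.
    lengths = [int(f * len(full)) for f in data_conf["split_fraction"]]
    lengths[-1] = len(full) - sum(lengths[:-1])  # absorb rounding error
    train_set, _, test_set = random_split(full, lengths)

    # loader_kwargs (batch_size, num_workers, pin_memory, drop_last)
    # are forwarded directly to the PyTorch DataLoader.
    return (
        DataLoader(train_set, shuffle=True, **loader_kwargs),
        DataLoader(test_set, **loader_kwargs),
    )

train_loader, test_loader = build_loaders(
    {"num_particles": 30, "split_fraction": [0.9, 0.0, 0.1]},
    {"pin_memory": True, "batch_size": 256, "num_workers": 16, "drop_last": True},
)
```
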
14 changes: 7 additions & 7 deletions configs/model/default.yaml
@@ -22,26 +22,26 @@ normaliser_config:
# Full transformer encoder model
trans_enc_config:
te_config:
-model_dim: 64
-num_layers: 2
+model_dim: 128
+num_layers: 4
mha_config:
num_heads: 8
dense_config:
-hddn_dim: 64
+hddn_dim: 256
act_h: lrlu
nrm: layer
node_embd_config:
num_blocks: 1
-hddn_dim: 64
+hddn_dim: 256
act_h: lrlu
nrm: layer
outp_embd_config:
-hddn_dim: 64
+hddn_dim: 256
act_h: lrlu
nrm: layer
ctxt_embd_config:
-outp_dim: 64
-hddn_dim: 64
+hddn_dim: 128
+num_blocks: 2
act_h: lrlu
nrm: layer

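The encoder is scaled up across the board: model_dim 64 to 128, num_layers 2 to 4, and the dense/embedding hidden widths 64 to 256 (128 for the context embedder). The repo defines its own transformer blocks, which do not appear in this diff; as a rough stand-in, the resized encoder corresponds to something like the stock PyTorch layers below, reading act_h: lrlu as leaky ReLU:

```python
# Rough stand-in only: the repo's own transformer-encoder classes are not part
# of this diff, so stock torch.nn layers are used to show what the new sizes mean.
import torch.nn as nn
import torch.nn.functional as F

model_dim, num_layers, num_heads, ff_hidden = 128, 4, 8, 256  # new values from the diff

encoder_layer = nn.TransformerEncoderLayer(
    d_model=model_dim,          # te_config.model_dim
    nhead=num_heads,            # mha_config.num_heads
    dim_feedforward=ff_hidden,  # dense_config.hddn_dim
    activation=F.leaky_relu,    # act_h: lrlu
    batch_first=True,
)
encoder = nn.TransformerEncoder(encoder_layer, num_layers=num_layers)
```

With twice the width, four times the feed-forward size and twice the depth, this is roughly an order of magnitude more parameters than the previous 64-dim, 2-layer encoder.
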
4 changes: 2 additions & 2 deletions configs/paths/default.yaml
@@ -1,6 +1,6 @@
# output dir will be created by hydra
-data_dir: /srv/beegfs/scratch/groups/rodem/datasets/jetnet/
-output_dir: /srv/beegfs/scratch/groups/rodem/jet_diffusion/checkpoints
+data_dir: /home/matthew/Documents/data/jetnet/
+output_dir: /home/matthew/Documents/saved_networks

# Interpolated
root_dir: ${oc.env:PROJECT_ROOT}
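
root_dir is resolved from the PROJECT_ROOT environment variable, and the commit also adds an empty .project-root marker file at the top of the repository. A minimal sketch of how the two typically fit together, assuming the pyrootutils package is what locates the root and sets the variable (the actual mechanism is not shown in this diff):

```python
# Sketch, not taken from the repo: locate the project root from the empty
# .project-root marker and export PROJECT_ROOT so that Hydra can resolve
# ${oc.env:PROJECT_ROOT} in configs/paths/default.yaml.
import pyrootutils

root = pyrootutils.setup_root(
    search_from=__file__,
    indicator=".project-root",  # the empty marker file added in this commit
    pythonpath=True,            # put the repo root on sys.path
    dotenv=True,                # load a .env file if one exists
)
# setup_root also sets the PROJECT_ROOT environment variable to `root`,
# which is what the root_dir interpolation above relies on.
```
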
2 changes: 1 addition & 1 deletion configs/trainer/default.yaml
@@ -7,7 +7,7 @@ accelerator: auto
devices: 1
gradient_clip_val: 5
precision: 32
-check_val_every_n_epoch: 1
+check_val_every_n_epoch: 10

# Interpolated
default_root_dir: ${paths.full_path}
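
Validating every 10th epoch instead of every epoch cuts down on the relatively expensive validation loop. These keys map one-to-one onto pytorch_lightning.Trainer arguments; a sketch of the assumed wiring (the repo's exact construction call is not part of this diff):

```python
# Sketch of how configs/trainer/default.yaml is typically consumed in a
# Lightning + Hydra project; assumed pattern, not copied from the repo.
import pytorch_lightning as pl

trainer = pl.Trainer(
    accelerator="auto",
    devices=1,
    gradient_clip_val=5,
    precision=32,
    check_val_every_n_epoch=10,          # was 1 before this commit
    default_root_dir="/path/to/output",  # ${paths.full_path} after interpolation
)
# trainer.fit(model, datamodule=datamodule)
```
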
1 change: 1 addition & 0 deletions scripts/train.py
@@ -23,6 +23,7 @@
version_base=None, config_path=str(root / "configs"), config_name="train.yaml"
)
def main(cfg: DictConfig) -> None:
+
log.info("Setting up full job config")
if cfg.full_resume:
cfg = reload_original_config(cfg)
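
For context, the decorated entry point follows the usual Hydra pattern. Everything below beyond the decorator and the lines shown in the diff is an assumed, typical body (including the cfg.datamodule / cfg.model / cfg.trainer group names and the instantiate calls), not the repo's actual code:

```python
# Assumed/typical shape of a Hydra entry point like scripts/train.py; only the
# decorator and the first few lines appear in the diff above.
import hydra
import pyrootutils
from omegaconf import DictConfig

root = pyrootutils.setup_root(__file__, pythonpath=True)  # assumed, see the sketch above

@hydra.main(version_base=None, config_path=str(root / "configs"), config_name="train.yaml")
def main(cfg: DictConfig) -> None:

    # Assumed body: instantiate each config group (each would need a _target_
    # entry, which sits outside the hunks shown here) and start training.
    datamodule = hydra.utils.instantiate(cfg.datamodule)
    model = hydra.utils.instantiate(cfg.model)
    trainer = hydra.utils.instantiate(cfg.trainer)
    trainer.fit(model, datamodule=datamodule)

if __name__ == "__main__":
    main()
```
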
2 changes: 1 addition & 1 deletion src/models/pc_jedi.py
@@ -178,7 +178,7 @@ def validation_step(self, sample: tuple, batch_idx: int) -> None:
)

# Add to the collection of the validaiton outputs
-self.val_outs.append(to_np(outputs), to_np(sample))
+self.val_outs.append((to_np(outputs), to_np(sample)))

def on_validation_epoch_end(self) -> None:
"""At the end of the validation epoch, calculate and log the metrics
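
The one-line fix matters because list.append takes exactly one argument: the old call self.val_outs.append(to_np(outputs), to_np(sample)) raises a TypeError as soon as validation runs, whereas wrapping the two arrays in a tuple stores them as a pair that on_validation_epoch_end can unpack. A minimal illustration (everything outside the changed append pattern is a placeholder):

```python
# Minimal illustration of the fix; the data here are placeholders.
outputs, sample = [1.0, 2.0], [3.0, 4.0]
val_outs = []

# Old call: TypeError: append() takes exactly one argument (2 given)
# val_outs.append(outputs, sample)

# New call: store the pair as a single tuple
val_outs.append((outputs, sample))

# Later, e.g. in on_validation_epoch_end, the stored pairs can be unpacked:
all_outputs, all_samples = zip(*val_outs)
```
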
