Skip to content

Commit

Permalink
Merge remote-tracking branch 'origin/main'
Browse files Browse the repository at this point in the history
  • Loading branch information
paquiteau committed Feb 3, 2025
2 parents 27a4742 + 8513153 commit 8be37f0
Show file tree
Hide file tree
Showing 8 changed files with 152 additions and 15 deletions.
3 changes: 2 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,8 @@ docs_build
.mypy_cache/
.ipynb_checkpoints/
.ropeproject/

__pycache__/
*.egg-info/
_version.py

*.mrd
Expand Down
92 changes: 92 additions & 0 deletions src/cli-conf/scenario2-2d.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,92 @@
# This file contains the configuration to produce scenario 2 of the Snkf paper, but with a 2D slice acquisition and reconstruction.

defaults:
- base_config
- handlers:
- activation-block
- sampler:
- stack-of-spiral
- reconstructors:
- adjoint
#- sequential
- _self_

cache_dir: ${oc.env:PWD}/cache
result_dir: results/scenario2
filename: ${cache_dir}/scenario2_2d_${engine.model}_${engine.snr}_${sampler.stack-of-spiral.constant}_${sampler.stack-of-spiral.accelz}.mrd

sim_conf:
max_sim_time: 360
seq: {TR: 50, TE: 25, FA: 12}
hardware:
n_coils: 1
dwell_time_ms: 0.001
shape: [60, 72, 60]
fov_mm: [181.0, 217.0, 181.0]

phantom:
name: brainweb
sub_id: 4
tissue_file: "tissue_7T"

handlers:
activation-block:
event_name: block_on
block_on: 20 # seconds
block_off: 20 #seconds
duration: 360 # seconds

sampler:
stack-of-spiral:
acsz: 1
accelz: 1
nb_revolutions: 10
constant: true
spiral_name: "galilean"

engine:
n_jobs: 1
chunk_size: 10
model: "simple"
snr: 10000
nufft_backend: "gpuNUFFT"
slice_2d: true

reconstructors:
adjoint:
nufft_backend: "gpuNUFFT"
density_compensation: "pipe"
# sequential:
# nufft_backend: "gpuNUFFT"
# density_compensation: false
# restart_strategy: WARM
# max_iter_per_frame: 50
# wavelet: "sym4"





hydra:
job:
chdir: true

run:
dir: ${result_dir}/outputs/${hydra.job.name}/${now:%Y-%m-%d_%H-%M-%S}
sweep:
dir: ${result_dir}/multirun/${hydra.job.name}/${now:%Y-%m-%d_%H-%M-%S}
subdir: ${hydra.job.num}

callbacks:
# gather_files:
# _target_: hydra_callbacks.MultiRunGatherer
# aggregator:
# _partial_: true
# _target_: snkf.cli.utils.aggregate_results

log_job:
_target_: hydra.experimental.callbacks.LogJobReturnCallback
latest_run:
_target_: hydra_callbacks.LatestRunLink
run_base_dir: ${result_dir}/outputs
multirun_base_dir: ${result_dir}/multirun
2 changes: 1 addition & 1 deletion src/cli-conf/scenario2.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -91,4 +91,4 @@ hydra:
latest_run:
_target_: hydra_callbacks.LatestRunLink
run_base_dir: ${result_dir}/outputs
multirun_base_dir: ${result_dir}/multirun
multirun_base_dir: ${result_dir}/multirun
12 changes: 11 additions & 1 deletion src/snake/core/engine/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -209,7 +209,17 @@ def __call__(
`_job_model_simple` methods.
"""
# Create the base dataset
make_base_mrd(filename, sampler, phantom, sim_conf, handlers, smaps, coil_cov)
make_base_mrd(
filename,
sampler,
phantom,
sim_conf,
handlers,
smaps,
coil_cov,
self.model,
self.slice_2d,
)

# Guesstimate the workload
if worker_chunk_size <= 0:
Expand Down
8 changes: 4 additions & 4 deletions src/snake/core/sampling/samplers.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,8 +31,8 @@ class NonCartesianAcquisitionSampler(BaseSampler):
obs_time_ms: int
Time spent to acquire a single shot
in_out: bool
If true, the trajectory is acquired with a double join pattern from/to
the periphery
If true, the trajectory is acquired with a double join pattern
from/to the periphery
ndim: int
Number of dimensions of the trajectory (2 or 3)
"""
Expand Down Expand Up @@ -198,8 +198,8 @@ class LoadTrajectorySampler(NonCartesianAcquisitionSampler):
obs_time_ms: int
Time spent to acquire a single shot
in_out: bool
If true, the trajectory is acquired with a double join pattern from/to
the periphery
If true, the trajectory is acquired with a double join pattern
from/to the periphery
"""

__sampler_name__ = "load-trajectory"
Expand Down
10 changes: 10 additions & 0 deletions src/snake/mrd_utils/loader.py
Original file line number Diff line number Diff line change
Expand Up @@ -211,6 +211,16 @@ def n_shots(self) -> int:
"""
return self.header.encoding[0].encodingLimits.kspace_encoding_step_1.maximum

@property
def engine_model(self) -> str:
    """Return the engine model name recorded in the MRD header.

    The value is read from the first ``userParameterString`` entry of the
    header's ``userParameters`` section.
    """
    # NOTE(review): positional access (index 0 == "engine_model") assumes
    # the ordering used by the MRD writer -- verify writer and loader
    # stay in sync if parameters are added.
    string_params = self.header.userParameters.userParameterString
    return string_params[0].value

@property
def slice_2d(self) -> bool:
    """Is the acquisition run on 2D slices.

    The flag is stored in the MRD header as the *string* ``"True"`` or
    ``"False"`` (the writer serializes it with ``str(slice_2d)``), so it
    must be compared textually: ``bool()`` on any non-empty string --
    including ``"False"`` -- is always True, which would make this
    property report 2D acquisition unconditionally.
    """
    # NOTE(review): positional access (index 1 == "slice_2d") assumes the
    # ordering used by the MRD writer -- verify writer and loader stay in
    # sync if parameters are added.
    return self.header.userParameters.userParameterString[1].value.lower() == "true"

#############
# Get data #
#############
Expand Down
17 changes: 14 additions & 3 deletions src/snake/mrd_utils/writer.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,9 @@
log = logging.getLogger(__name__)


def get_mrd_header(sim_conf: SimConfig, engine: str) -> mrd.xsd.ismrmrdHeader:
def get_mrd_header(
sim_conf: SimConfig, engine: str, model: str, slice_2d: bool
) -> mrd.xsd.ismrmrdHeader:
"""Create a MRD Header for snake-fmri data."""
H = mrd.xsd.ismrmrdHeader()
# Experimental conditions
Expand Down Expand Up @@ -78,7 +80,14 @@ def get_mrd_header(sim_conf: SimConfig, engine: str) -> mrd.xsd.ismrmrdHeader:
("rng_seed", sim_conf.rng_seed),
("max_sim_time", sim_conf.max_sim_time),
]
]
],
userParameterString=[
mrd.xsd.userParameterStringType(name=name, value=value)
for name, value in [
("engine_model", model),
("slice_2d", str(slice_2d)),
]
],
)

return H
Expand Down Expand Up @@ -204,6 +213,8 @@ def make_base_mrd(
handlers: list[AbstractHandler] | HandlerList | None = None,
smaps: NDArray | None = None,
coil_cov: NDArray | None = None,
model: str = "simple",
slice_2d: bool = False,
) -> mrd.Dataset:
"""
Create a base `.mrd` file from the simulation configurations.
Expand Down Expand Up @@ -233,7 +244,7 @@ def make_base_mrd(
pass
dataset = mrd.Dataset(filename, "dataset", create_if_needed=True)
dataset.write_xml_header(
mrd.xsd.ToXML(get_mrd_header(sim_conf, sampler.__engine__))
mrd.xsd.ToXML(get_mrd_header(sim_conf, sampler.__engine__, model, slice_2d))
)
with PerfLogger(logger=log, name="acq"):
sampler.add_all_acq_mrd(dataset, sim_conf)
Expand Down
23 changes: 18 additions & 5 deletions src/snake/toolkit/reconstructors/pysap.py
Original file line number Diff line number Diff line change
Expand Up @@ -119,7 +119,6 @@ def _reconstruct_cartesian(
): idx
for idx in range(data_loader.n_frames)
}

for future in as_completed(futures):
future.result()
pbar.update(1)
Expand All @@ -139,10 +138,15 @@ def _reconstruct_nufft(
from mrinufft import get_operator

smaps = data_loader.get_smaps()

shape = data_loader.shape
traj, kspace_data = data_loader.get_kspace_frame(0)

if data_loader.slice_2d:
shape = data_loader.shape[:2]
traj = traj.reshape(data_loader.n_shots, -1, traj.shape[-1])[0, :, :2]

kwargs = dict(
shape=data_loader.shape,
shape=shape,
n_coils=data_loader.n_coils,
smaps=smaps,
)
Expand All @@ -153,6 +157,7 @@ def _reconstruct_nufft(
kwargs["density"] = self.density_compensation
if "stacked" in self.nufft_backend:
kwargs["z_index"] = "auto"

nufft_operator = get_operator(
self.nufft_backend,
samples=traj,
Expand All @@ -165,8 +170,16 @@ def _reconstruct_nufft(

for i in tqdm(range(data_loader.n_frames)):
traj, data = data_loader.get_kspace_frame(i)
nufft_operator.samples = traj
final_images[i] = abs(nufft_operator.adj_op(data))
if data_loader.slice_2d:
nufft_operator.samples = traj.reshape(
data_loader.n_shots, -1, traj.shape[-1]
)[0, :, :2]
data = np.reshape(data, (data.shape[0], data_loader.n_shots, -1))
for j in range(data.shape[1]):
final_images[i, :, :, j] = abs(nufft_operator.adj_op(data[:, j]))
else:
nufft_operator.samples = traj
final_images[i] = abs(nufft_operator.adj_op(data))
return final_images


Expand Down

0 comments on commit 8be37f0

Please sign in to comment.