Skip to content

Commit

Permalink
Merge pull request #114 from PennLINC/accept-string-IntendedFor
Browse files Browse the repository at this point in the history
Accept a string value for the IntendedFor field (in addition to a list)
  • Loading branch information
scovitz authored Apr 16, 2021
2 parents 8ebbb5d + 3bbd15b commit e59d6d8
Show file tree
Hide file tree
Showing 3 changed files with 54 additions and 20 deletions.
48 changes: 35 additions & 13 deletions bond/bond.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
import bids.layout
import json
import csv
import os
from pathlib import Path
from bids.layout import parse_file_entities
from bids.utils import listify
Expand All @@ -26,7 +27,7 @@ class BOnD(object):

def __init__(self, data_root, use_datalad=False, grouping_config=None):

self.path = data_root
self.path = os.path.abspath(data_root)
self._layout = None
self.keys_files = {}
self.fieldmaps_cached = False
Expand Down Expand Up @@ -420,14 +421,27 @@ def change_filename(self, filepath, entities):
data = json_file.get_dict()

if 'IntendedFor' in data.keys():
for item in data['IntendedFor']:
if item in _get_intended_for_reference(filepath):

# remove old filename
data['IntendedFor'].remove(item)
# add new filename
data['IntendedFor'].append(_get_intended_for_reference
(new_path))
# check if IntendedFor field is a str or list
if isinstance(data['IntendedFor'], str):
if data['IntendedFor'] == \
_get_intended_for_reference(filepath):
# replace old filename with new one (overwrite string)
data['IntendedFor'] = \
_get_intended_for_reference(new_path)

# update the json with the new data dictionary
_update_json(json_file.path, data)

if isinstance(data['IntendedFor'], list):
for item in data['IntendedFor']:
if item in _get_intended_for_reference(filepath):

# remove old filename
data['IntendedFor'].remove(item)
# add new filename
data['IntendedFor'].append(
_get_intended_for_reference(new_path))

# update the json with the new data dictionary
_update_json(json_file.path, data)

Expand Down Expand Up @@ -517,13 +531,21 @@ def _purge_associations(self, scans):
# remove scan references in the IntendedFor

if 'IntendedFor' in data.keys():
for item in data['IntendedFor']:
if item in if_scans:
data['IntendedFor'].remove(item)

# check if IntendedFor field value is a list or a string
if isinstance(data['IntendedFor'], str):
if data['IntendedFor'] in if_scans:
data['IntendedFor'] = []
# update the json with the new data dictionary
_update_json(json_file.path, data)

if isinstance(data['IntendedFor'], list):
for item in data['IntendedFor']:
if item in if_scans:
data['IntendedFor'].remove(item)

# update the json with the new data dictionary
_update_json(json_file.path, data)

# save IntendedFor purges so that you can datalad run the
# remove association file commands on a clean dataset
if self.use_datalad:
Expand Down
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
{
"IntendedFor": [
"ses-phdiff/dwi/sub-01_ses-phdiff_acq-HASC55AP_dwi.nii.gz"
],
"IntendedFor":
"ses-phdiff/dwi/sub-01_ses-phdiff_acq-HASC55AP_dwi.nii.gz",
"AcquisitionMatrixPE": 64,
"AcquisitionNumber": 1,
"BaseResolution": 64,
Expand Down
21 changes: 17 additions & 4 deletions tests/test_bond.py
Original file line number Diff line number Diff line change
Expand Up @@ -115,23 +115,35 @@ def test_purge_no_datalad(tmp_path):
json_name = data_root / "complete" / "sub-03" / "ses-phdiff" \
/ "func" / "sub-03_ses-phdiff_task-rest_bold.json"
scans.append(scan_name)
purge_path = str(tmp_path / "purge_scans.txt")
scans.append(data_root / "complete" / "sub-01" / "ses-phdiff/dwi/sub-01_ses-phdiff_acq-HASC55AP_dwi.nii.gz")

# create and save .txt with list of scans
purge_path = str(tmp_path / "purge_scans.txt")
with open(purge_path, 'w') as filehandle:
for listitem in scans:
filehandle.write('%s\n' % listitem)
bod = BOnD(data_root / "complete", use_datalad=False)
#bod.datalad_save()

#assert bod.is_datalad_clean()
assert Path(scan_name).exists()
assert Path(json_name).exists()

# create and save .txt with list of scans
# Check that IntendedFor purge worked
with open(str(data_root / "complete" / "sub-01" / "ses-phdiff" / "fmap" / "sub-01_ses-phdiff_acq-v4_phasediff.json")) as f:
j_dict = json.load(f)

assert "ses-phdiff/dwi/sub-01_ses-phdiff_acq-HASC55AP_dwi.nii.gz" in j_dict.values()
assert isinstance(j_dict['IntendedFor'], str)
# PURGE
bod.purge(purge_path)

with open(str(data_root / "complete" / "sub-01" / "ses-phdiff" / "fmap" / "sub-01_ses-phdiff_acq-v4_phasediff.json")) as f:
purged_dict = json.load(f)

assert not Path(scan_name).exists()
assert not Path(json_name).exists()
assert "ses-phdiff/dwi/sub-01_ses-phdiff_acq-HASC55AP_dwi.nii.gz" not in purged_dict.values()
assert isinstance(purged_dict['IntendedFor'], list)
assert purged_dict['IntendedFor'] == []

def test_purge(tmp_path):
data_root = get_data(tmp_path)
Expand Down Expand Up @@ -926,6 +938,7 @@ def test_validator(tmp_path):
# test the validator in valid dataset
call = build_validator_call(str(data_root) + "/complete")
ret = run_validator(call)
print(ret)

assert ret.returncode == 0

Expand Down

0 comments on commit e59d6d8

Please sign in to comment.