Commit

Merge branch 'master' into bf-visualize-fodf
CHrlS98 committed Dec 12, 2024
2 parents 0923930 + e256482 commit e9d73a8
Showing 10 changed files with 339 additions and 170 deletions.
4 changes: 2 additions & 2 deletions scilpy/io/streamlines.py
@@ -75,8 +75,8 @@ def load_tractogram_with_reference(parser, args, filepath, arg_name=None):
     filepath: str
         Path of the tractogram file.
     arg_name: str, optional
-        Name of the reference argument. By default the args.ref is used. If
-        arg_name is given, then args.arg_name_ref will be used instead.
+        Name of the reference argument. By default the args.reference is used.
+        If arg_name is given, then args.arg_name_ref will be used instead.
     """
     if is_argument_set(args, 'bbox_check'):
         bbox_check = args.bbox_check
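
The corrected docstring describes the lookup rule: the reference comes from args.reference by default, or from an attribute derived from arg_name when it is given. A minimal sketch of that rule (function and attribute names are illustrative, not scilpy's actual implementation):

# Hypothetical sketch of the reference-lookup rule the docstring describes.
def resolve_reference(args, arg_name=None):
    if arg_name is None:
        return args.reference                 # default: args.reference
    return getattr(args, arg_name + '_ref')   # e.g. args.moving_tractogram_ref
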
2 changes: 1 addition & 1 deletion scilpy/tractograms/dps_and_dpp_management.py
@@ -217,7 +217,7 @@ def project_dpp_to_map(sft, dpp_key, sum_lines=False, endpoints_only=False):
         for p in points:
             x, y, z = sft.streamlines[s][p, :].astype(int)  # Or floor
             count[x, y, z] += 1
-            the_map[x, y, z] += sft.data_per_point[dpp_key][s][p]
+            the_map[x, y, z] += np.squeeze(sft.data_per_point[dpp_key][s][p])

     if not sum_lines:
         count = np.maximum(count, 1e-6)  # Avoid division by 0
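
Why the added np.squeeze: data_per_point entries typically carry a trailing feature axis (e.g. shape (n_points, 1) for a single-valued dpp), so the per-point value must be reduced to a scalar before being accumulated into a 3D map. A small illustration, assuming a single-valued dpp:

import numpy as np

val = np.array([0.7])                  # dpp value at one point, shape (1,)
the_map = np.zeros((2, 2, 2))
the_map[0, 0, 0] += np.squeeze(val)    # scalar add, no broadcasting surprises
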
7 changes: 4 additions & 3 deletions scripts/scil_NODDI_maps.py
@@ -120,12 +120,13 @@ def main():
     shells_centroids, indices_shells = identify_shells(bvals, args.tolerance,
                                                        round_centroids=True)

-    nb_shells = len(shells_centroids)
+    non_b0_shells = shells_centroids[shells_centroids > args.tolerance]
+    nb_shells = len(non_b0_shells)
     if nb_shells <= 1:
         raise ValueError("Amico's NODDI works with data with more than one "
                          "shell, but you seem to have single-shell data (we "
-                         "found shells {}). Change tolerance if necessary."
-                         .format(np.sort(shells_centroids)))
+                         "found shell {}). Change tolerance if necessary."
+                         .format(non_b0_shells[0]))

     logging.info('Will compute NODDI with AMICO on {} shells at found at {}.'
                  .format(len(shells_centroids), np.sort(shells_centroids)))
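
The fix excludes b0 centroids (values at or below the tolerance) before counting shells, so single-shell data is now rejected with an accurate message. A small illustration with assumed values:

import numpy as np

shells_centroids = np.array([0, 1000])   # b0 plus one shell (assumed values)
tolerance = 30
non_b0_shells = shells_centroids[shells_centroids > tolerance]
print(len(non_b0_shells))                # 1 -> the script raises ValueError
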
5 changes: 4 additions & 1 deletion scripts/scil_freewater_maps.py
@@ -2,7 +2,7 @@
 # -*- coding: utf-8 -*-

 """
-Compute Free Water maps [1] using AMICO.
+Compute Free Water maps [1] using the AMICO framework [2].
 This script supports both single and multi-shell data.

 Formerly: scil_compute_freewater.py
@@ -35,6 +35,9 @@
 [1] Pasternak O, Sochen N, Gur Y, Intrator N, Assaf Y.
     Free water elimination and mapping from diffusion mri.
     Magn Reson Med. 62 (3) (2009) 717-730.
+[2] Daducci A, et al. Accelerated microstructure imaging
+    via convex optimization (AMICO) from diffusion MRI data.
+    Neuroimage 105 (2015) 32-44.
 """
83 changes: 0 additions & 83 deletions scripts/scil_tractogram_add_dps.py

This file was deleted.

3 changes: 2 additions & 1 deletion scripts/scil_tractogram_dpp_math.py
@@ -19,7 +19,8 @@
 If endpoints_only and dps mode is set operation will be calculated across the
 data at the endpoints and stored as a single value (or array in the 4D case)
-per streamline.
+per streamline. If you wish to perform operations on dps values, please use
+scil_tractogram_dps_math.py.

 Endpoint only operation:
 correlation: correlation calculated between arrays extracted from streamline
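
For context, an endpoints-only dps reduction takes the dpp values at a streamline's two endpoints and stores one result per streamline. A rough sketch (the names and the mean reduction here are assumptions, not the script's code):

import numpy as np

dpp = np.random.rand(20, 1)               # one value per point, one streamline
endpoints = np.vstack([dpp[0], dpp[-1]])  # keep only the two endpoints
dps_value = endpoints.mean()              # stored once per streamline
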
153 changes: 153 additions & 0 deletions scripts/scil_tractogram_dps_math.py
@@ -0,0 +1,153 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

"""
Import, extract or delete dps (data_per_streamline) information to a tractogram
file. Can be for example SIFT2 weights, processing information, bundle IDs,
tracking seeds, etc.

This script is not the same as the dps mode of scil_tractogram_dpp_math.py,
which performs operations on dpp (data_per_point) and saves the result as dps.
Instead this script performs operations directly on dps values.

Input and output tractograms must be .trk, unless you are using the 'import'
operation, in which case a .tck input tractogram is accepted.

Usage examples:
    > scil_tractogram_dps_math.py tractogram.trk import "bundle_ids"
          --in_dps_file my_bundle_ids.txt
    > scil_tractogram_dps_math.py tractogram.trk export "seeds"
          --out_dps_file seeds.npy
"""

import nibabel as nib
import argparse
import logging

from dipy.io.streamline import save_tractogram, load_tractogram
from scilpy.io.streamlines import load_tractogram_with_reference
import numpy as np

from scilpy.io.utils import (add_overwrite_arg,
                             add_verbose_arg,
                             assert_inputs_exist,
                             assert_outputs_exist,
                             check_tract_trk,
                             load_matrix_in_any_format,
                             save_matrix_in_any_format)


def _build_arg_parser():

    p = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawTextHelpFormatter)

    p.add_argument('in_tractogram',
                   help='Input tractogram (.trk for all operations,\n'
                        '.tck accepted for import).')
    p.add_argument('operation', metavar='OPERATION',
                   choices=['import', 'delete', 'export'],
                   help='The type of operation to be performed on the\n'
                        'tractogram\'s data_per_streamline at the given\n'
                        'key. Must be one of the following: [%(choices)s].\n'
                        'The additional arguments required for each\n'
                        'operation are specified under each group below.')
    p.add_argument('dps_key', type=str,
                   help='Key name used for the operation.')

    p.add_argument('--out_tractogram',
                   help='Output tractogram (.trk). Required for "import" and\n'
                        '"delete" operations.')

    import_args = p.add_argument_group('Operation "import" mandatory options')
    import_args.add_argument('--in_dps_file',
                             help='File containing the data to import to\n'
                                  'streamlines (.txt, .npy or .mat).')

    export_args = p.add_argument_group('Operation "export" mandatory options')
    export_args.add_argument('--out_dps_file',
                             help='File in which the extracted data will be\n'
                                  'saved (.txt or .npy).')

    add_verbose_arg(p)
    add_overwrite_arg(p)

    return p


def main():
    parser = _build_arg_parser()
    args = parser.parse_args()
    logging.getLogger().setLevel(logging.getLevelName(args.verbose))

    if args.operation == 'import':
        if not nib.streamlines.is_supported(args.in_tractogram):
            parser.error('Invalid input streamline file format (must be trk '
                         'or tck): {0}'.format(args.in_tractogram))
    else:
        check_tract_trk(parser, args.in_tractogram)

    if args.out_tractogram:
        check_tract_trk(parser, args.out_tractogram)

    assert_inputs_exist(parser, args.in_tractogram, args.in_dps_file)
    assert_outputs_exist(parser, args, [], optional=[args.out_dps_file,
                                                     args.out_tractogram])

    sft = load_tractogram_with_reference(parser, args, args.in_tractogram)

    if args.operation == 'import':
        if args.in_dps_file is None:
            parser.error('The --in_dps_file option is required for '
                         'the "import" operation.')

        if args.out_tractogram is None:
            parser.error('The --out_tractogram option is required for '
                         'the "import" operation.')

        # Make sure the user is not unwillingly overwriting dps
        if (args.dps_key in sft.get_data_per_streamline_keys() and
                not args.overwrite):
            parser.error('"{}" already in data per streamline. Use -f to '
                         'force overwriting.'.format(args.dps_key))

        # Load data and remove extraneous dimensions
        data = np.squeeze(load_matrix_in_any_format(args.in_dps_file))

        # Quick check as the built-in error from sft is not too explicit
        if len(sft) != data.shape[0]:
            raise ValueError('Data must have as many entries ({}) as there '
                             'are streamlines ({}).'.format(data.shape[0],
                                                            len(sft)))

        sft.data_per_streamline[args.dps_key] = data

        save_tractogram(sft, args.out_tractogram)

    if args.operation == 'delete':
        if args.out_tractogram is None:
            parser.error('The --out_tractogram option is required for '
                         'the "delete" operation.')

        del sft.data_per_streamline[args.dps_key]

        save_tractogram(sft, args.out_tractogram)

    if args.operation == 'export':
        if args.out_dps_file is None:
            parser.error('The --out_dps_file option is required for '
                         'the "export" operation.')

        # Extract data and reshape
        if args.dps_key not in sft.data_per_streamline.keys():
            raise ValueError('Data does not have any data_per_streamline'
                             ' entry stored at this key: {}'
                             .format(args.dps_key))

        data = np.squeeze(sft.data_per_streamline[args.dps_key])
        save_matrix_in_any_format(args.out_dps_file, data)


if __name__ == '__main__':
    main()
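
In effect, the "import" operation amounts to the following hand-rolled dipy equivalent (a sketch only; file and key names are placeholders, and the script adds the format checks and -f handling shown above):

import numpy as np
from dipy.io.streamline import load_tractogram, save_tractogram

# Attach one dps value per streamline, then save; mirrors the script's core.
sft = load_tractogram('tractogram.trk', 'same')
weights = np.squeeze(np.loadtxt('my_bundle_ids.txt'))
assert len(sft) == weights.shape[0]   # one entry per streamline
sft.data_per_streamline['bundle_ids'] = weights
save_tractogram(sft, 'tagged.trk')
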
22 changes: 20 additions & 2 deletions scripts/tests/test_NODDI_maps.py
@@ -8,7 +8,8 @@
 from scilpy.io.fetcher import fetch_data, get_testing_files_dict

 # If they already exist, this only takes 5 seconds (check md5sum)
-fetch_data(get_testing_files_dict(), keys=['commit_amico.zip'])
+fetch_data(get_testing_files_dict(), keys=['commit_amico.zip',
+                                           'processing.zip'])
 tmp_dir = tempfile.TemporaryDirectory()

@@ -32,5 +33,22 @@ def test_execution_commit_amico(script_runner, monkeypatch):
                             '--out_dir', 'noddi', '--tol', '30',
                             '--para_diff', '0.0017', '--iso_diff', '0.003',
                             '--lambda1', '0.5', '--lambda2', '0.001',
-                            '--processes', '1')
+                            '--processes', '1', '-f')
     assert ret.success
+
+
+def test_single_shell_fail(script_runner, monkeypatch):
+    monkeypatch.chdir(os.path.expanduser(tmp_dir.name))
+    in_dwi = os.path.join(SCILPY_HOME, 'processing',
+                          'dwi_crop_1000.nii.gz')
+    in_bval = os.path.join(SCILPY_HOME, 'processing',
+                           '1000.bval')
+    in_bvec = os.path.join(SCILPY_HOME, 'processing',
+                           '1000.bvec')
+    ret = script_runner.run('scil_NODDI_maps.py', in_dwi,
+                            in_bval, in_bvec,
+                            '--out_dir', 'noddi', '--tol', '30',
+                            '--para_diff', '0.0017', '--iso_diff', '0.003',
+                            '--lambda1', '0.5', '--lambda2', '0.001',
+                            '--processes', '1', '-f')
+    assert not ret.success