Merge pull request dipy#1686 from skoudoro/standardize-reconst-workflow
Standardize workflow
Garyfallidis authored Dec 11, 2018
2 parents 1184387 + 4d400d7 commit 52cb7c6
Showing 8 changed files with 92 additions and 180 deletions.
2 changes: 1 addition & 1 deletion dipy/io/image.py
@@ -8,7 +8,7 @@ def load_nifti(fname, return_img=False, return_voxsize=False,
img = nib.load(fname)
data = img.get_data()
vox_size = img.header.get_zooms()[:3]

ret_val = [data, img.affine]

if return_img:
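For reference, the load_nifti helper touched above always returns the data array and the affine, and appends the nibabel image object when return_img=True (return_voxsize presumably appends the voxel size analogously). A minimal sketch of the convention, with 'dwi.nii.gz' as a placeholder path that is not part of this commit:

from dipy.io.image import load_nifti

# Default call: data array plus affine.
data, affine = load_nifti('dwi.nii.gz')

# With return_img=True the nibabel image is appended to the return value,
# which is the form the reworked workflows below rely on.
data, affine, img = load_nifti('dwi.nii.gz', return_img=True)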
10 changes: 3 additions & 7 deletions dipy/workflows/denoise.py
@@ -3,8 +3,7 @@
import logging
import shutil

import nibabel as nib

from dipy.io.image import load_nifti, save_nifti
from dipy.denoise.nlmeans import nlmeans
from dipy.denoise.noise_estimate import estimate_sigma
from dipy.workflows.workflow import Workflow
@@ -43,17 +42,14 @@ def run(self, input_files, sigma=0, out_dir='',
logging.warning('Denoising skipped for now.')
else:
logging.info('Denoising {0}'.format(fpath))
image = nib.load(fpath)
data = image.get_data()
data, affine, image = load_nifti(fpath, return_img=True)

if sigma == 0:
logging.info('Estimating sigma')
sigma = estimate_sigma(data)
logging.debug('Found sigma {0}'.format(sigma))

denoised_data = nlmeans(data, sigma)
denoised_image = nib.Nifti1Image(
denoised_data, image.affine, image.header)
save_nifti(odenoised, denoised_data, affine, image.header)

denoised_image.to_filename(odenoised)
logging.info('Denoised volume saved as {0}'.format(odenoised))
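This hunk is the core of the standardization: the direct nibabel load / Nifti1Image / to_filename calls are replaced by the load_nifti and save_nifti helpers. A rough sketch of the resulting pattern, assuming placeholder paths 'dwi.nii.gz' and 'dwi_denoised.nii.gz':

from dipy.io.image import load_nifti, save_nifti
from dipy.denoise.noise_estimate import estimate_sigma
from dipy.denoise.nlmeans import nlmeans

# Load the data, affine and the nibabel image (kept only for its header).
data, affine, image = load_nifti('dwi.nii.gz', return_img=True)

sigma = estimate_sigma(data)        # per-volume noise estimate
denoised = nlmeans(data, sigma)     # non-local means denoising

# Save with the original affine and header, as the workflow now does.
save_nifti('dwi_denoised.nii.gz', denoised, affine, image.header)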
1 change: 0 additions & 1 deletion dipy/workflows/mask.py
@@ -1,7 +1,6 @@
#!/usr/bin/env python
from __future__ import division

import inspect
import logging
import numpy as np

172 changes: 55 additions & 117 deletions dipy/workflows/reconst.py

Large diffs are not rendered by default.

76 changes: 27 additions & 49 deletions dipy/workflows/stats.py
@@ -1,15 +1,13 @@
#!/usr/bin/env python

import logging
import shutil
import numpy as np
import nibabel as nib
import sys
import os
import json
from scipy.ndimage.morphology import binary_dilation

from dipy.io import read_bvals_bvecs
from dipy.io.image import load_nifti, save_nifti
from dipy.core.gradients import gradient_table
from dipy.segment.mask import median_otsu
from dipy.reconst.dti import TensorModel
@@ -26,25 +24,24 @@ class SNRinCCFlow(Workflow):
def get_short_name(cls):
return 'snrincc'

def run(self, data_file, data_bvals, data_bvecs, mask=None,
bbox_threshold=(0.6, 1, 0, 0.1, 0, 0.1), out_dir='',
def run(self, data_files, bvals_files, bvecs_files, mask_files,
bbox_threshold=[0.6, 1, 0, 0.1, 0, 0.1], out_dir='',
out_file='product.json', out_mask_cc='cc.nii.gz',
out_mask_noise='mask_noise.nii.gz'):
""" Workflow for computing the signal-to-noise ratio in the
corpus callosum
"""Compute the signal-to-noise ratio in the corpus callosum.
Parameters
----------
data_file : string
data_files : string
Path to the dwi.nii.gz file. This path may contain wildcards to
process multiple inputs at once.
data_bvals : string
bvals_files : string
Path of bvals.
data_bvecs : string
bvecs_files : string
Path of bvecs.
mask : string, optional
Path of mask if desired. (default None)
bbox_threshold : string, optional
mask_files : string
Path of brain mask.
bbox_threshold : variable float, optional
Threshold for bounding box, values separated with commas for ex.
[0.6,1,0,0.1,0,0.1]. (default (0.6, 1, 0, 0.1, 0, 0.1))
out_dir : string, optional
@@ -56,47 +53,28 @@ def run(self, data_file, data_bvals, data_bvecs, mask=None,
out_mask_noise : string, optional
Name of the mask noise volume to be saved
(default 'mask_noise.nii.gz')
"""

if not isinstance(bbox_threshold, tuple):
b = bbox_threshold.replace("[", "")
b = b.replace("]", "")
b = b.replace("(", "")
b = b.replace(")", "")
b = b.replace(" ", "")
b = b.split(",")
for i in range(len(b)):
b[i] = float(b[i])
bbox_threshold = tuple(b)
"""
io_it = self.get_io_iterator()

for data_path, data_bvals_path, data_bvecs_path, out_path, \
for dwi_path, bvals_path, bvecs_path, mask_path, out_path, \
cc_mask_path, mask_noise_path in io_it:
img = nib.load('{0}'.format(data_path))
bvals, bvecs = read_bvals_bvecs('{0}'.format(
data_bvals_path), '{0}'.format(data_bvecs_path))
gtab = gradient_table(bvals, bvecs)

data = img.get_data()
affine = img.affine
data, affine = load_nifti(dwi_path)
bvals, bvecs = read_bvals_bvecs(bvals_path, bvecs_path)
gtab = gradient_table(bvals=bvals, bvecs=bvecs)

logging.info('Computing brain mask...')
b0_mask, calc_mask = median_otsu(data)
_, calc_mask = median_otsu(data)

if mask is None:
mask = calc_mask
else:
mask = nib.load(mask).get_data().astype(bool)
mask = np.array(calc_mask == mask).astype(int)
mask, affine = load_nifti(mask_path)
mask = np.array(calc_mask == mask.astype(bool)).astype(int)

logging.info('Computing tensors...')
tenmodel = TensorModel(gtab)
tensorfit = tenmodel.fit(data, mask=mask)

logging.info(
'Computing worst-case/best-case SNR using the CC...')
threshold = bbox_threshold

if np.ndim(data) == 4:
CC_box = np.zeros_like(data[..., 0])
@@ -116,22 +94,22 @@ def run(self, data_file, data_bvals, data_bvecs, mask=None,
bounds_min[1]:bounds_max[1],
bounds_min[2]:bounds_max[2]] = 1

mask_cc_part, cfa = segment_from_cfa(tensorfit, CC_box, threshold,
if len(bbox_threshold) != 6:
raise IOError('bbox_threshold should have 6 float values')

mask_cc_part, cfa = segment_from_cfa(tensorfit, CC_box,
bbox_threshold,
return_cfa=True)

cfa_img = nib.Nifti1Image((cfa*255).astype(np.uint8), affine)
mask_cc_part_img = nib.Nifti1Image(
mask_cc_part.astype(np.uint8), affine)
nib.save(mask_cc_part_img, cc_mask_path)
save_nifti(cc_mask_path, mask_cc_part.astype(np.uint8), affine)
logging.info('CC mask saved as {0}'.format(cc_mask_path))

mean_signal = np.mean(data[mask_cc_part], axis=0)
mask_noise = binary_dilation(mask, iterations=10)
mask_noise[..., :mask_noise.shape[-1]//2] = 1
mask_noise = ~mask_noise
mask_noise_img = nib.Nifti1Image(
mask_noise.astype(np.uint8), affine)
nib.save(mask_noise_img, mask_noise_path)

save_nifti(mask_noise_path, mask_noise.astype(np.uint8), affine)
logging.info('Mask noise saved as {0}'.format(mask_noise_path))

noise_std = np.std(data[mask_noise, :])
@@ -169,5 +147,5 @@ def run(self, data_file, data_bvals, data_bvecs, mask=None,
str(SNR_directions[2])
})

with open(os.path.join(out_dir, out_file), 'w') as myfile:
with open(os.path.join(out_dir, out_path), 'w') as myfile:
json.dump(data, myfile)
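With the reworked signature the brain mask is a required positional input, bbox_threshold arrives as a sequence of six floats (the hand-rolled string parsing shown above is dropped, presumably because the workflow argument handling delivers floats directly), and all image I/O goes through load_nifti/save_nifti. A hypothetical invocation, with placeholder file names:

from dipy.workflows.stats import SNRinCCFlow

snr_flow = SNRinCCFlow(force=True)
snr_flow.run('dwi.nii.gz', 'dwi.bval', 'dwi.bvec', 'brain_mask.nii.gz',
             bbox_threshold=[0.6, 1, 0, 0.1, 0, 0.1],
             out_dir='out', out_file='product.json')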
1 change: 1 addition & 0 deletions dipy/workflows/tests/test_io.py
@@ -34,5 +34,6 @@ def test_io_info():
pass
file.close()


if __name__ == '__main__':
test_io_info()
4 changes: 2 additions & 2 deletions dipy/workflows/tests/test_reconst_mapmri.py
@@ -40,8 +40,8 @@ def reconst_mmri_core(flow, lap, pos):
volume = vol_img.get_data()

mmri_flow = flow()
mmri_flow.run(data_file=data_path, data_bvals=bval_path,
data_bvecs=bvec_path, small_delta=0.0129,
mmri_flow.run(data_files=data_path, bvals_files=bval_path,
bvecs_files=bvec_path, small_delta=0.0129,
big_delta=0.0218, laplacian=lap,
positivity=pos, out_dir=out_dir)

6 changes: 3 additions & 3 deletions dipy/workflows/tests/test_stats.py
@@ -8,7 +8,7 @@

import numpy as np

from nose.tools import assert_true, assert_equal
from nose.tools import assert_true

from dipy.data import get_data
from dipy.workflows.stats import SNRinCCFlow
@@ -25,7 +25,7 @@ def test_stats():
nib.save(mask_img, mask_path)

snr_flow = SNRinCCFlow(force=True)
args = [data_path, bval_path, bvec_path]
args = [data_path, bval_path, bvec_path, mask_path]

snr_flow.run(*args, out_dir=out_dir)
assert_true(os.path.exists(os.path.join(out_dir, 'product.json')))
@@ -38,7 +38,7 @@ def test_stats():
out_dir, 'mask_noise.nii.gz')).st_size != 0)

snr_flow._force_overwrite = True
snr_flow.run(*args, mask=mask_path, out_dir=out_dir)
snr_flow.run(*args, out_dir=out_dir)
assert_true(os.path.exists(os.path.join(out_dir, 'product.json')))
assert_true(os.stat(os.path.join(
out_dir, 'product.json')).st_size != 0)
