diff --git a/.travis.yml b/.travis.yml index da94d4e662..661959c6ec 100644 --- a/.travis.yml +++ b/.travis.yml @@ -88,7 +88,7 @@ before_install: - source venv/bin/activate - python --version # just to check - pip install -U pip wheel # upgrade to latest pip find 3.5 wheels; wheel to avoid errors - - retry pip install nose flake8 # always + - retry pip install nose flake8 mock # always - wheelhouse_pip_install $EXTRA_PIP_FLAGS $DEPENDS # pydicom <= 0.9.8 doesn't install on python 3 - if [ "${TRAVIS_PYTHON_VERSION:0:1}" == "2" ]; then diff --git a/appveyor.yml b/appveyor.yml index 5f2a5b746c..ae711f8e97 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -46,7 +46,7 @@ install: - "python -c \"import struct; print(struct.calcsize('P') * 8)\"" # Install the dependencies of the project. - - "conda install --yes --quiet numpy scipy matplotlib nose h5py" + - "conda install --yes --quiet numpy scipy matplotlib nose h5py mock" - "pip install pydicom" - "python setup.py install" - "SET NIBABEL_DATA_DIR=%CD%\\nibabel-data" diff --git a/bin/parrec2nii b/bin/parrec2nii index 4be7eaf93e..4856af9986 100755 --- a/bin/parrec2nii +++ b/bin/parrec2nii @@ -1,333 +1,8 @@ #!python """PAR/REC to NIfTI converter """ -from __future__ import division, print_function, absolute_import -from optparse import OptionParser, Option -import numpy as np -import numpy.linalg as npl -import sys -import os -import csv -import nibabel -import nibabel.parrec as pr -from nibabel.parrec import one_line -from nibabel.mriutils import calculate_dwell_time, MRIError -import nibabel.nifti1 as nifti1 -from nibabel.filename_parser import splitext_addext -from nibabel.volumeutils import fname_ext_ul_case -from nibabel.orientations import (io_orientation, inv_ornt_aff, - apply_orientation) -from nibabel.affines import apply_affine, from_matvec, to_matvec - - -def get_opt_parser(): - # use module docstring for help output - p = OptionParser( - usage="%s [OPTIONS] \n\n" % sys.argv[0] + __doc__, - version="%prog " + 
nibabel.__version__) - p.add_option( - Option("-v", "--verbose", action="store_true", dest="verbose", - default=False, - help="""Make some noise.""")) - p.add_option( - Option("-o", "--output-dir", action="store", type="string", - dest="outdir", default=None, - help=one_line("""Destination directory for NIfTI files. - Default: current directory."""))) - p.add_option( - Option("-c", "--compressed", action="store_true", - dest="compressed", default=False, - help="Whether to write compressed NIfTI files or not.")) - p.add_option( - Option("-p", "--permit-truncated", action="store_true", - dest="permit_truncated", default=False, - help=one_line( - """Permit conversion of truncated recordings. Support for - this is experimental, and results *must* be checked - afterward for validity."""))) - p.add_option( - Option("-b", "--bvs", action="store_true", dest="bvs", default=False, - help=one_line( - """Output bvals/bvecs files in addition to NIFTI - image."""))) - p.add_option( - Option("-d", "--dwell-time", action="store_true", default=False, - dest="dwell_time", - help=one_line( - """Calculate the scan dwell time. If supplied, the magnetic - field strength should also be supplied using - --field-strength (default 3). The field strength must be - supplied because it is not encoded in the PAR/REC - format."""))) - p.add_option( - Option("--field-strength", action="store", type="float", - dest="field_strength", - help=one_line( - """The magnetic field strength of the recording, only needed - for --dwell-time. The field strength must be supplied - because it is not encoded in the PAR/REC format."""))) - p.add_option( - Option("-i", "--volume-info", action="store_true", dest="vol_info", - default=False, - help=one_line( - """Export .PAR volume labels corresponding to the fourth - dimension of the data. The dimension info will be stored in - CSV format with the first row containing dimension labels - and the subsequent rows (one per volume), the corresponding - indices. 
Only labels that vary along the 4th dimension are - exported (e.g. for a single volume structural scan there - are no dynamic labels and no output file will be created). - """))) - p.add_option( - Option("--origin", action="store", dest="origin", default="scanner", - help=one_line( - """Reference point of the q-form transformation of the NIfTI - image. If 'scanner' the (0,0,0) coordinates will refer to - the scanner's iso center. If 'fov', this coordinate will be - the center of the recorded volume (field of view). Default: - 'scanner'."""))) - p.add_option( - Option("--minmax", action="store", nargs=2, dest="minmax", - help=one_line( - """Mininum and maximum settings to be stored in the NIfTI - header. If any of them is set to 'parse', the scaled data is - scanned for the actual minimum and maximum. To bypass this - potentially slow and memory intensive step (the data has to - be scaled and fully loaded into memory), fixed values can be - provided as space-separated pair, e.g. '5.4 120.4'. It is - possible to set a fixed minimum as scan for the actual - maximum (and vice versa). Default: 'parse parse'."""))) - p.set_defaults(minmax=('parse', 'parse')) - p.add_option( - Option("--store-header", action="store_true", dest="store_header", - default=False, - help=one_line( - """If set, all information from the PAR header is stored in - an extension ofthe NIfTI file header. Default: off"""))) - p.add_option( - Option("--scaling", action="store", dest="scaling", default='dv', - help=one_line( - """Choose data scaling setting. The PAR header defines two - different data scaling settings: 'dv' (values displayed on - console) and 'fp' (floating point values). Either one can be - chosen, or scaling can be disabled completely ('off'). Note - that neither method will actually scale the data, but just - store the corresponding settings in the NIfTI header, unless - non-uniform scaling is used, in which case the data is - stored in the file in scaled form. 
Default: 'dv'"""))) - p.add_option( - Option('--keep-trace', action="store_true", dest='keep_trace', - default=False, - help=one_line("""Do not discard the diagnostic Philips DTI - trace volume, if it exists in the data."""))) - p.add_option( - Option("--overwrite", action="store_true", dest="overwrite", - default=False, - help=one_line("""Overwrite file if it exists. Default: - False"""))) - p.add_option( - Option("--strict-sort", action="store_true", dest="strict_sort", - default=False, - help=one_line("""Use additional keys in determining the order - to sort the slices within the .REC file. This may be necessary - for more complicated scans with multiple echos, - cardiac phases, ASL label states, etc."""))) - return p - - -def verbose(msg, indent=0): - if verbose.switch: - print("%s%s" % (' ' * indent, msg)) - - -def error(msg, exit_code): - sys.stderr.write(msg + '\n') - sys.exit(exit_code) - - -def proc_file(infile, opts): - # figure out the output filename, and see if it exists - basefilename = splitext_addext(os.path.basename(infile))[0] - if opts.outdir is not None: - # set output path - basefilename = os.path.join(opts.outdir, basefilename) - - # prep a file - if opts.compressed: - verbose('Using gzip compression') - outfilename = basefilename + '.nii.gz' - else: - outfilename = basefilename + '.nii' - if os.path.isfile(outfilename) and not opts.overwrite: - raise IOError('Output file "%s" exists, use --overwrite to ' - 'overwrite it' % outfilename) - - # load the PAR header and data - scaling = 'dv' if opts.scaling == 'off' else opts.scaling - infile = fname_ext_ul_case(infile) - pr_img = pr.load(infile, - permit_truncated=opts.permit_truncated, - scaling=scaling, - strict_sort=opts.strict_sort) - pr_hdr = pr_img.header - affine = pr_hdr.get_affine(origin=opts.origin) - slope, intercept = pr_hdr.get_data_scaling(scaling) - if opts.scaling != 'off': - verbose('Using data scaling "%s"' % opts.scaling) - # get original scaling, and decide if we scale 
in-place or not - if opts.scaling == 'off': - slope = np.array([1.]) - intercept = np.array([0.]) - in_data = pr_img.dataobj.get_unscaled() - out_dtype = pr_hdr.get_data_dtype() - elif not np.any(np.diff(slope)) and not np.any(np.diff(intercept)): - # Single scalefactor case - slope = slope.ravel()[0] - intercept = intercept.ravel()[0] - in_data = pr_img.dataobj.get_unscaled() - out_dtype = pr_hdr.get_data_dtype() - else: - # Multi scalefactor case - slope = np.array([1.]) - intercept = np.array([0.]) - in_data = np.array(pr_img.dataobj) - out_dtype = np.float64 - # Reorient data block to LAS+ if necessary - ornt = io_orientation(np.diag([-1, 1, 1, 1]).dot(affine)) - if np.all(ornt == [[0, 1], - [1, 1], - [2, 1]]): # already in LAS+ - t_aff = np.eye(4) - else: # Not in LAS+ - t_aff = inv_ornt_aff(ornt, pr_img.shape) - affine = np.dot(affine, t_aff) - in_data = apply_orientation(in_data, ornt) - - bvals, bvecs = pr_hdr.get_bvals_bvecs() - if not opts.keep_trace: # discard Philips DTI trace if present - if bvecs is not None: - bad_mask = np.logical_and(bvals != 0, (bvecs == 0).all(axis=1)) - if bad_mask.sum() > 0: - pl = 's' if bad_mask.sum() != 1 else '' - verbose('Removing %s DTI trace volume%s' - % (bad_mask.sum(), pl)) - good_mask = ~bad_mask - in_data = in_data[..., good_mask] - bvals = bvals[good_mask] - bvecs = bvecs[good_mask] - - # Make corresponding NIfTI image - nimg = nifti1.Nifti1Image(in_data, affine, pr_hdr) - nhdr = nimg.header - nhdr.set_data_dtype(out_dtype) - nhdr.set_slope_inter(slope, intercept) - - if 'parse' in opts.minmax: - # need to get the scaled data - verbose('Loading (and scaling) the data to determine value range') - if opts.minmax[0] == 'parse': - nhdr['cal_min'] = in_data.min() * slope + intercept - else: - nhdr['cal_min'] = float(opts.minmax[0]) - if opts.minmax[1] == 'parse': - nhdr['cal_max'] = in_data.max() * slope + intercept - else: - nhdr['cal_max'] = float(opts.minmax[1]) - - # container for potential NIfTI1 header extensions 
- if opts.store_header: - # dump the full PAR header content into an extension - with open(infile, 'rb') as fobj: # contents must be bytes - hdr_dump = fobj.read() - dump_ext = nifti1.Nifti1Extension('comment', hdr_dump) - nhdr.extensions.append(dump_ext) - - verbose('Writing %s' % outfilename) - nibabel.save(nimg, outfilename) - - # write out bvals/bvecs if requested - if opts.bvs: - if bvals is None and bvecs is None: - verbose('No DTI volumes detected, bvals and bvecs not written') - elif bvecs is None: - verbose('DTI volumes detected, but no diffusion direction info was' - 'found. Writing .bvals file only.') - with open(basefilename + '.bvals', 'w') as fid: - # np.savetxt could do this, but it's just a loop anyway - for val in bvals: - fid.write('%s ' % val) - fid.write('\n') - else: - verbose('Writing .bvals and .bvecs files') - # Transform bvecs with reorientation affine - orig2new = npl.inv(t_aff) - bv_reorient = from_matvec(to_matvec(orig2new)[0], [0, 0, 0]) - bvecs = apply_affine(bv_reorient, bvecs) - with open(basefilename + '.bvals', 'w') as fid: - # np.savetxt could do this, but it's just a loop anyway - for val in bvals: - fid.write('%s ' % val) - fid.write('\n') - with open(basefilename + '.bvecs', 'w') as fid: - for row in bvecs.T: - for val in row: - fid.write('%s ' % val) - fid.write('\n') - - # export data labels varying along the 4th dimensions if requested - if opts.vol_info: - labels = pr_img.header.get_volume_labels() - if len(labels) > 0: - vol_keys = list(labels.keys()) - with open(basefilename + '.ordering.csv', 'w') as csvfile: - csvwriter = csv.writer(csvfile, delimiter=',') - csvwriter.writerow(vol_keys) - for vals in zip(*[labels[k] for k in vol_keys]): - csvwriter.writerow(vals) - - # write out dwell time if requested - if opts.dwell_time: - try: - dwell_time = calculate_dwell_time( - pr_hdr.get_water_fat_shift(), - pr_hdr.get_echo_train_length(), - opts.field_strength) - except MRIError: - verbose('No EPI factors, dwell time not 
written') - else: - verbose('Writing dwell time (%r sec) calculated assuming %sT ' - 'magnet' % (dwell_time, opts.field_strength)) - with open(basefilename + '.dwell_time', 'w') as fid: - fid.write('%r\n' % dwell_time) - # done - - -def main(): - parser = get_opt_parser() - (opts, infiles) = parser.parse_args() - - verbose.switch = opts.verbose - - if opts.origin not in ['scanner', 'fov']: - error("Unrecognized value for --origin: '%s'." % opts.origin, 1) - if opts.dwell_time and opts.field_strength is None: - error('Need --field-strength for dwell time calculation', 1) - - # store any exceptions - errs = [] - for infile in infiles: - verbose('Processing %s' % infile) - try: - proc_file(infile, opts) - except Exception as e: - errs.append('%s: %s' % (infile, e)) - - if len(errs): - error('Caught %i exceptions. Dump follows:\n\n %s' - % (len(errs), '\n'.join(errs)), 1) - else: - verbose('Done') +from nibabel.parrec2nii_cmd import main if __name__ == '__main__': diff --git a/dev-requirements.txt b/dev-requirements.txt index 659ab6cada..f63af96cf4 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -1,3 +1,4 @@ # Requirements for running tests -r requirements.txt nose +mock diff --git a/doc/source/installation.rst b/doc/source/installation.rst index fd311f93ee..9c0b00b83c 100644 --- a/doc/source/installation.rst +++ b/doc/source/installation.rst @@ -92,6 +92,7 @@ Requirements * PyDICOM_ 0.9.7 or greater (optional, for DICOM support) * `Python Imaging Library`_ (optional, for PNG conversion in DICOMFS) * nose_ 0.11 or greater (optional, to run the tests) +* mock_ (optional, to run the tests) * sphinx_ (optional, to build the documentation) Get the development sources diff --git a/nibabel/__init__.py b/nibabel/__init__.py index 91ccace44b..cf4173fc27 100644 --- a/nibabel/__init__.py +++ b/nibabel/__init__.py @@ -67,16 +67,17 @@ from . import streamlines from . 
import viewers -# be friendly on systems with ancient numpy -- no tests, but at least -# importable +# Note test requirement for "mock". Requirement for "nose" tested by numpy. try: + import mock +except ImportError: + def test(*args, **kwargs): + raise RuntimeError('Need "mock" package for tests') +else: from numpy.testing import Tester test = Tester().test bench = Tester().bench - del Tester -except ImportError: - def test(*args, **kwargs): - raise RuntimeError('Need numpy >= 1.2 for tests') + del mock, Tester from .pkg_info import get_pkg_info as _get_pkg_info diff --git a/nibabel/parrec2nii_cmd.py b/nibabel/parrec2nii_cmd.py new file mode 100644 index 0000000000..95f5f74459 --- /dev/null +++ b/nibabel/parrec2nii_cmd.py @@ -0,0 +1,330 @@ +"""Code for PAR/REC to NIfTI converter command +""" +from __future__ import division, print_function, absolute_import + +from optparse import OptionParser, Option +import numpy as np +import numpy.linalg as npl +import sys +import os +import csv +import nibabel +import nibabel.parrec as pr +from nibabel.parrec import one_line +from nibabel.mriutils import calculate_dwell_time, MRIError +import nibabel.nifti1 as nifti1 +from nibabel.filename_parser import splitext_addext +from nibabel.volumeutils import fname_ext_ul_case +from nibabel.orientations import (io_orientation, inv_ornt_aff, + apply_orientation) +from nibabel.affines import apply_affine, from_matvec, to_matvec + + +def get_opt_parser(): + # use module docstring for help output + p = OptionParser( + usage="%s [OPTIONS] \n\n" % sys.argv[0] + __doc__, + version="%prog " + nibabel.__version__) + p.add_option( + Option("-v", "--verbose", action="store_true", dest="verbose", + default=False, + help="""Make some noise.""")) + p.add_option( + Option("-o", "--output-dir", action="store", type="string", + dest="outdir", default=None, + help=one_line("""Destination directory for NIfTI files. 
+ Default: current directory."""))) + p.add_option( + Option("-c", "--compressed", action="store_true", + dest="compressed", default=False, + help="Whether to write compressed NIfTI files or not.")) + p.add_option( + Option("-p", "--permit-truncated", action="store_true", + dest="permit_truncated", default=False, + help=one_line( + """Permit conversion of truncated recordings. Support for + this is experimental, and results *must* be checked + afterward for validity."""))) + p.add_option( + Option("-b", "--bvs", action="store_true", dest="bvs", default=False, + help=one_line( + """Output bvals/bvecs files in addition to NIFTI + image."""))) + p.add_option( + Option("-d", "--dwell-time", action="store_true", default=False, + dest="dwell_time", + help=one_line( + """Calculate the scan dwell time. If supplied, the magnetic + field strength should also be supplied using + --field-strength (default 3). The field strength must be + supplied because it is not encoded in the PAR/REC + format."""))) + p.add_option( + Option("--field-strength", action="store", type="float", + dest="field_strength", + help=one_line( + """The magnetic field strength of the recording, only needed + for --dwell-time. The field strength must be supplied + because it is not encoded in the PAR/REC format."""))) + p.add_option( + Option("-i", "--volume-info", action="store_true", dest="vol_info", + default=False, + help=one_line( + """Export .PAR volume labels corresponding to the fourth + dimension of the data. The dimension info will be stored in + CSV format with the first row containing dimension labels + and the subsequent rows (one per volume), the corresponding + indices. Only labels that vary along the 4th dimension are + exported (e.g. for a single volume structural scan there + are no dynamic labels and no output file will be created). 
+                   """)))
+    p.add_option(
+        Option("--origin", action="store", dest="origin", default="scanner",
+               help=one_line(
+                   """Reference point of the q-form transformation of the NIfTI
+                   image. If 'scanner' the (0,0,0) coordinates will refer to
+                   the scanner's iso center. If 'fov', this coordinate will be
+                   the center of the recorded volume (field of view). Default:
+                   'scanner'.""")))
+    p.add_option(
+        Option("--minmax", action="store", nargs=2, dest="minmax",
+               help=one_line(
+                   """Minimum and maximum settings to be stored in the NIfTI
+                   header. If any of them is set to 'parse', the scaled data is
+                   scanned for the actual minimum and maximum. To bypass this
+                   potentially slow and memory intensive step (the data has to
+                   be scaled and fully loaded into memory), fixed values can be
+                   provided as space-separated pair, e.g. '5.4 120.4'. It is
+                   possible to set a fixed minimum as scan for the actual
+                   maximum (and vice versa). Default: 'parse parse'.""")))
+    p.set_defaults(minmax=('parse', 'parse'))
+    p.add_option(
+        Option("--store-header", action="store_true", dest="store_header",
+               default=False,
+               help=one_line(
+                   """If set, all information from the PAR header is stored in
+                   an extension of the NIfTI file header. Default: off""")))
+    p.add_option(
+        Option("--scaling", action="store", dest="scaling", default='dv',
+               help=one_line(
+                   """Choose data scaling setting. The PAR header defines two
+                   different data scaling settings: 'dv' (values displayed on
+                   console) and 'fp' (floating point values). Either one can be
+                   chosen, or scaling can be disabled completely ('off'). Note
+                   that neither method will actually scale the data, but just
+                   store the corresponding settings in the NIfTI header, unless
+                   non-uniform scaling is used, in which case the data is
+                   stored in the file in scaled form. 
Default: 'dv'"""))) + p.add_option( + Option('--keep-trace', action="store_true", dest='keep_trace', + default=False, + help=one_line("""Do not discard the diagnostic Philips DTI + trace volume, if it exists in the data."""))) + p.add_option( + Option("--overwrite", action="store_true", dest="overwrite", + default=False, + help=one_line("""Overwrite file if it exists. Default: + False"""))) + p.add_option( + Option("--strict-sort", action="store_true", dest="strict_sort", + default=False, + help=one_line("""Use additional keys in determining the order + to sort the slices within the .REC file. This may be necessary + for more complicated scans with multiple echos, + cardiac phases, ASL label states, etc."""))) + return p + + +def verbose(msg, indent=0): + if verbose.switch: + print("%s%s" % (' ' * indent, msg)) + + +def error(msg, exit_code): + sys.stderr.write(msg + '\n') + sys.exit(exit_code) + + +def proc_file(infile, opts): + # figure out the output filename, and see if it exists + basefilename = splitext_addext(os.path.basename(infile))[0] + if opts.outdir is not None: + # set output path + basefilename = os.path.join(opts.outdir, basefilename) + + # prep a file + if opts.compressed: + verbose('Using gzip compression') + outfilename = basefilename + '.nii.gz' + else: + outfilename = basefilename + '.nii' + if os.path.isfile(outfilename) and not opts.overwrite: + raise IOError('Output file "%s" exists, use --overwrite to ' + 'overwrite it' % outfilename) + + # load the PAR header and data + scaling = 'dv' if opts.scaling == 'off' else opts.scaling + infile = fname_ext_ul_case(infile) + pr_img = pr.load(infile, + permit_truncated=opts.permit_truncated, + scaling=scaling, + strict_sort=opts.strict_sort) + pr_hdr = pr_img.header + affine = pr_hdr.get_affine(origin=opts.origin) + slope, intercept = pr_hdr.get_data_scaling(scaling) + if opts.scaling != 'off': + verbose('Using data scaling "%s"' % opts.scaling) + # get original scaling, and decide if we scale 
in-place or not + if opts.scaling == 'off': + slope = np.array([1.]) + intercept = np.array([0.]) + in_data = pr_img.dataobj.get_unscaled() + out_dtype = pr_hdr.get_data_dtype() + elif not np.any(np.diff(slope)) and not np.any(np.diff(intercept)): + # Single scalefactor case + slope = slope.ravel()[0] + intercept = intercept.ravel()[0] + in_data = pr_img.dataobj.get_unscaled() + out_dtype = pr_hdr.get_data_dtype() + else: + # Multi scalefactor case + slope = np.array([1.]) + intercept = np.array([0.]) + in_data = np.array(pr_img.dataobj) + out_dtype = np.float64 + # Reorient data block to LAS+ if necessary + ornt = io_orientation(np.diag([-1, 1, 1, 1]).dot(affine)) + if np.all(ornt == [[0, 1], + [1, 1], + [2, 1]]): # already in LAS+ + t_aff = np.eye(4) + else: # Not in LAS+ + t_aff = inv_ornt_aff(ornt, pr_img.shape) + affine = np.dot(affine, t_aff) + in_data = apply_orientation(in_data, ornt) + + bvals, bvecs = pr_hdr.get_bvals_bvecs() + if not opts.keep_trace: # discard Philips DTI trace if present + if bvecs is not None: + bad_mask = np.logical_and(bvals != 0, (bvecs == 0).all(axis=1)) + if bad_mask.sum() > 0: + pl = 's' if bad_mask.sum() != 1 else '' + verbose('Removing %s DTI trace volume%s' + % (bad_mask.sum(), pl)) + good_mask = ~bad_mask + in_data = in_data[..., good_mask] + bvals = bvals[good_mask] + bvecs = bvecs[good_mask] + + # Make corresponding NIfTI image + nimg = nifti1.Nifti1Image(in_data, affine, pr_hdr) + nhdr = nimg.header + nhdr.set_data_dtype(out_dtype) + nhdr.set_slope_inter(slope, intercept) + nhdr.set_qform(affine, code=2) + + if 'parse' in opts.minmax: + # need to get the scaled data + verbose('Loading (and scaling) the data to determine value range') + if opts.minmax[0] == 'parse': + nhdr['cal_min'] = in_data.min() * slope + intercept + else: + nhdr['cal_min'] = float(opts.minmax[0]) + if opts.minmax[1] == 'parse': + nhdr['cal_max'] = in_data.max() * slope + intercept + else: + nhdr['cal_max'] = float(opts.minmax[1]) + + # container for 
potential NIfTI1 header extensions
+    if opts.store_header:
+        # dump the full PAR header content into an extension
+        with open(infile, 'rb') as fobj:  # contents must be bytes
+            hdr_dump = fobj.read()
+            dump_ext = nifti1.Nifti1Extension('comment', hdr_dump)
+        nhdr.extensions.append(dump_ext)
+
+    verbose('Writing %s' % outfilename)
+    nibabel.save(nimg, outfilename)
+
+    # write out bvals/bvecs if requested
+    if opts.bvs:
+        if bvals is None and bvecs is None:
+            verbose('No DTI volumes detected, bvals and bvecs not written')
+        elif bvecs is None:
+            verbose('DTI volumes detected, but no diffusion direction info was '
+                    'found. Writing .bvals file only.')
+            with open(basefilename + '.bvals', 'w') as fid:
+                # np.savetxt could do this, but it's just a loop anyway
+                for val in bvals:
+                    fid.write('%s ' % val)
+                fid.write('\n')
+        else:
+            verbose('Writing .bvals and .bvecs files')
+            # Transform bvecs with reorientation affine
+            orig2new = npl.inv(t_aff)
+            bv_reorient = from_matvec(to_matvec(orig2new)[0], [0, 0, 0])
+            bvecs = apply_affine(bv_reorient, bvecs)
+            with open(basefilename + '.bvals', 'w') as fid:
+                # np.savetxt could do this, but it's just a loop anyway
+                for val in bvals:
+                    fid.write('%s ' % val)
+                fid.write('\n')
+            with open(basefilename + '.bvecs', 'w') as fid:
+                for row in bvecs.T:
+                    for val in row:
+                        fid.write('%s ' % val)
+                    fid.write('\n')
+
+    # export data labels varying along the 4th dimensions if requested
+    if opts.vol_info:
+        labels = pr_img.header.get_volume_labels()
+        if len(labels) > 0:
+            vol_keys = list(labels.keys())
+            with open(basefilename + '.ordering.csv', 'w') as csvfile:
+                csvwriter = csv.writer(csvfile, delimiter=',')
+                csvwriter.writerow(vol_keys)
+                for vals in zip(*[labels[k] for k in vol_keys]):
+                    csvwriter.writerow(vals)
+
+    # write out dwell time if requested
+    if opts.dwell_time:
+        try:
+            dwell_time = calculate_dwell_time(
+                pr_hdr.get_water_fat_shift(),
+                pr_hdr.get_echo_train_length(),
+                opts.field_strength)
+        except MRIError:
+            
verbose('No EPI factors, dwell time not written') + else: + verbose('Writing dwell time (%r sec) calculated assuming %sT ' + 'magnet' % (dwell_time, opts.field_strength)) + with open(basefilename + '.dwell_time', 'w') as fid: + fid.write('%r\n' % dwell_time) + # done + + +def main(): + parser = get_opt_parser() + (opts, infiles) = parser.parse_args() + + verbose.switch = opts.verbose + + if opts.origin not in ['scanner', 'fov']: + error("Unrecognized value for --origin: '%s'." % opts.origin, 1) + if opts.dwell_time and opts.field_strength is None: + error('Need --field-strength for dwell time calculation', 1) + + # store any exceptions + errs = [] + for infile in infiles: + verbose('Processing %s' % infile) + try: + proc_file(infile, opts) + except Exception as e: + errs.append('%s: %s' % (infile, e)) + + if len(errs): + error('Caught %i exceptions. Dump follows:\n\n %s' + % (len(errs), '\n'.join(errs)), 1) + else: + verbose('Done') diff --git a/nibabel/tests/test_parrec2nii.py b/nibabel/tests/test_parrec2nii.py new file mode 100644 index 0000000000..2008988d83 --- /dev/null +++ b/nibabel/tests/test_parrec2nii.py @@ -0,0 +1,103 @@ +""" Tests for the parrec2nii exe code +""" +import imp +from os.path import join, isfile, basename + +import numpy +from numpy import array as npa + +import nibabel +from nibabel import parrec2nii_cmd as parrec2nii + +from mock import Mock, MagicMock +from nose.tools import assert_true +from numpy.testing import (assert_almost_equal, assert_array_equal) + +from nibabel.tests.test_parrec import EG_PAR, VARY_PAR +from nibabel.tmpdirs import InTemporaryDirectory + + +AN_OLD_AFFINE = numpy.array( + [[-3.64994708, 0., 1.83564171, 123.66276611], + [0., -3.75, 0., 115.617], + [0.86045705, 0., 7.78655376, -27.91161211], + [0., 0., 0., 1.]]) + +PAR_AFFINE = numpy.array( +[[ -3.64994708, 0. , 1.83564171, 107.63076611], + [ 0. , 3.75, 0. , -118.125 ], + [ 0.86045705, 0. , 7.78655376, -58.25061211], + [ 0. , 0. , 0. , 1. 
]]) + + +def teardown(): + # Reload tested module to clear run-time settings in tests + imp.reload(parrec2nii) + + +def test_parrec2nii_sets_qform_with_code2(): + """Unit test that ensures that set_qform() is called on the new header. + """ + imp.reload(parrec2nii) + parrec2nii.verbose.switch = False + + parrec2nii.io_orientation = Mock() + parrec2nii.io_orientation.return_value = [[0, 1],[1, 1],[2, 1]] # LAS+ + + parrec2nii.nifti1 = Mock() + nimg = Mock() + nhdr = MagicMock() + nimg.header = nhdr + parrec2nii.nifti1.Nifti1Image.return_value = nimg + + parrec2nii.pr = Mock() + pr_img = Mock() + pr_hdr = Mock() + pr_hdr.get_data_scaling.return_value = (npa([]), npa([])) + pr_hdr.get_bvals_bvecs.return_value = (None, None) + pr_hdr.get_affine.return_value = AN_OLD_AFFINE + pr_img.header = pr_hdr + parrec2nii.pr.load.return_value = pr_img + + opts = Mock() + opts.outdir = None + opts.scaling = 'off' + opts.minmax = [1, 1] + opts.store_header = False + opts.bvs = False + opts.vol_info = False + opts.dwell_time = False + + infile = 'nonexistent.PAR' + parrec2nii.proc_file(infile, opts) + nhdr.set_qform.assert_called_with(pr_hdr.get_affine(), code=2) + + +def test_parrec2nii_save_load_qform_code(): + """Tests that after parrec2nii saves file, it has the qform 'code' + set to '2', which means 'aligned', so that other software, e.g. FSL + picks up the qform. 
+ """ + imp.reload(parrec2nii) + parrec2nii.verbose.switch = False + + opts = Mock() + opts.outdir = None + opts.scaling = 'off' + opts.minmax = [1, 1] + opts.store_header = False + opts.bvs = False + opts.vol_info = False + opts.dwell_time = False + opts.compressed = False + + with InTemporaryDirectory() as pth: + opts.outdir = pth + for fname in [EG_PAR, VARY_PAR]: + parrec2nii.proc_file(fname, opts) + outfname = join(pth, basename(fname)).replace('.PAR', '.nii') + assert_true(isfile(outfname)) + img = nibabel.load(outfname) + assert_almost_equal(img.get_affine(), PAR_AFFINE, 4) + assert_array_equal(img.header['qform_code'], 2) + assert_array_equal(img.header['sform_code'], 2)