Commit 469fbd9

Author: Ben Cipollini (committed)
STY: autopep8 on source code.
1 parent 5970787 commit 469fbd9

39 files changed: +212 −171 lines
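The hunks below are mechanical style fixes of the kind autopep8 produces: spaces around binary operators, a blank line after each class statement, single-`#` block comments, and re-indented closing brackets and continuation lines. As a rough illustration only — the exact invocation used for this commit is not recorded in the diff — the operator-spacing part of these changes can be reproduced with autopep8's Python API. The sample source lines and the choice of pycodestyle codes below are assumptions for the sketch, not part of the commit:

# Minimal sketch (assumed usage, not the command actually run for this commit):
# reproduce the operator-spacing fixes seen in the hunks below.
import autopep8

# Two lines in the style of the '-' lines in this diff (illustrative only).
src = (
    "old_n_out, old_n_in = aff.shape[0]-1, aff.shape[1]-1\n"
    "aff_plus[old_n_out+i, old_n_in+i] = el\n"
)

# E225/E226 are the pycodestyle codes for missing whitespace around
# (arithmetic) operators; selecting them asks autopep8 to apply those fixes.
fixed = autopep8.fix_code(src, options={'select': ['E225', 'E226']})
print(fixed)  # operators gain surrounding spaces, matching the '+' lines

On a whole source tree, a command-line form such as `autopep8 --in-place --recursive nibabel/` would apply the same class of fixes; whether that was the exact command used here is an assumption.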

nibabel/affines.py

Lines changed: 2 additions & 2 deletions
@@ -214,7 +214,7 @@ def append_diag(aff, steps, starts=()):
         starts = np.zeros(n_steps, dtype=steps.dtype)
     elif len(starts) != n_steps:
         raise ValueError('Steps should have same length as starts')
-    old_n_out, old_n_in = aff.shape[0]-1, aff.shape[1]-1
+    old_n_out, old_n_in = aff.shape[0] - 1, aff.shape[1] - 1
     # make new affine
     aff_plus = np.zeros((old_n_out + n_steps + 1,
                          old_n_in + n_steps + 1), dtype=aff.dtype)
@@ -223,7 +223,7 @@ def append_diag(aff, steps, starts=()):
     aff_plus[:old_n_out, -1] = aff[:old_n_out, -1]
     # Add new diagonal elements
     for i, el in enumerate(steps):
-        aff_plus[old_n_out+i, old_n_in+i] = el
+        aff_plus[old_n_out + i, old_n_in + i] = el
     # Add translations for new affine, plus last 1
     aff_plus[old_n_out:, -1] = list(starts) + [1]
     return aff_plus

nibabel/analyze.py

Lines changed: 12 additions & 12 deletions
@@ -107,7 +107,7 @@
     ('session_error', 'i2'),
     ('regular', 'S1'),
     ('hkey_un0', 'S1')
-    ]
+]
 image_dimension_dtd = [
     ('dim', 'i2', (8,)),
     ('vox_units', 'S4'),
@@ -127,7 +127,7 @@
     ('verified', 'i4'),
     ('glmax', 'i4'),
     ('glmin', 'i4')
-    ]
+]
 data_history_dtd = [
     ('descrip', 'S80'),
     ('aux_file', 'S24'),
@@ -147,7 +147,7 @@
     ('omin', 'i4'),
     ('smax', 'i4'),
     ('smin', 'i4')
-    ]
+]

 # Full header numpy dtype combined across sub-fields
 header_dtype = np.dtype(header_key_dtd + image_dimension_dtd +
@@ -606,7 +606,7 @@ def get_data_shape(self):
         ndims = dims[0]
         if ndims == 0:
             return 0,
-        return tuple(int(d) for d in dims[1:ndims+1])
+        return tuple(int(d) for d in dims[1:ndims + 1])

     def set_data_shape(self, shape):
         ''' Set shape of data
@@ -624,18 +624,18 @@ def set_data_shape(self, shape):
         dims[:] = 1
         dims[0] = ndims
         try:
-            dims[1:ndims+1] = shape
+            dims[1:ndims + 1] = shape
         except (ValueError, OverflowError):
             # numpy 1.4.1 at least generates a ValueError from trying to set a
             # python long into an int64 array (dims are int64 for nifti2)
             values_fit = False
         else:
-            values_fit = np.all(dims[1:ndims+1] == shape)
+            values_fit = np.all(dims[1:ndims + 1] == shape)
         # Error if we did not succeed setting dimensions
         if not values_fit:
             raise HeaderDataError('shape %s does not fit in dim datatype' %
                                   (shape,))
-        self._structarr['pixdim'][ndims+1:] = 1.0
+        self._structarr['pixdim'][ndims + 1:] = 1.0

     def get_base_affine(self):
         ''' Get affine from basic (shared) header fields
@@ -659,8 +659,8 @@ def get_base_affine(self):
         hdr = self._structarr
         dims = hdr['dim']
         ndim = dims[0]
-        return shape_zoom_affine(hdr['dim'][1:ndim+1],
-                                 hdr['pixdim'][1:ndim+1],
+        return shape_zoom_affine(hdr['dim'][1:ndim + 1],
+                                 hdr['pixdim'][1:ndim + 1],
                                  self.default_x_flip)

     get_best_affine = get_base_affine
@@ -691,7 +691,7 @@ def get_zooms(self):
         if ndim == 0:
             return (1.0,)
         pixdims = hdr['pixdim']
-        return tuple(pixdims[1:ndim+1])
+        return tuple(pixdims[1:ndim + 1])

     def set_zooms(self, zooms):
         ''' Set zooms into header fields
@@ -708,7 +708,7 @@ def set_zooms(self, zooms):
         if np.any(zooms < 0):
             raise HeaderDataError('zooms must be positive')
         pixdims = hdr['pixdim']
-        pixdims[1:ndim+1] = zooms[:]
+        pixdims[1:ndim + 1] = zooms[:]

     def as_analyze_map(self):
         """ Return header as mapping for conversion to Analyze types
@@ -794,7 +794,7 @@ def set_slope_inter(self, slope, inter=None):
             If float, value must be 0.0 or we raise a ``HeaderTypeError``
         '''
         if ((slope in (None, 1) or np.isnan(slope)) and
-            (inter in (None, 0) or np.isnan(inter))):
+                (inter in (None, 0) or np.isnan(inter))):
             return
         raise HeaderTypeError('Cannot set slope != 1 or intercept != 0 '
                               'for Analyze headers')

nibabel/batteryrunners.py

Lines changed: 1 addition & 0 deletions
@@ -175,6 +175,7 @@ def __len__(self):


 class Report(object):
+
     def __init__(self,
                  error=Exception,
                  problem_level=0,

nibabel/casting.py

Lines changed: 2 additions & 2 deletions
@@ -272,7 +272,7 @@ def type_info(np_type):
     if not np_type in (np.longdouble, np.longcomplex) or width not in (16, 32):
         raise FloatingError('We had not expected type %s' % np_type)
     if (vals == (1, 1, 16) and on_powerpc() and
-        _check_maxexp(np.longdouble, 1024)):
+            _check_maxexp(np.longdouble, 1024)):
         # double pair on PPC. The _check_nmant routine does not work for this
         # type, hence the powerpc platform check instead
         ret.update(dict(nmant=106, width=width))
@@ -664,7 +664,7 @@ def best_float():
     except FloatingError:
         return np.float64
     if (long_info['nmant'] > type_info(np.float64)['nmant'] and
-        machine() != 'sparc64'): # sparc has crazy-slow float128
+            machine() != 'sparc64'):  # sparc has crazy-slow float128
         return np.longdouble
     return np.float64

nibabel/checkwarns.py

Lines changed: 2 additions & 0 deletions
@@ -19,6 +19,7 @@


 class ErrorWarnings(error_warnings):
+
     def __init__(self, *args, **kwargs):
         warnings.warn('ErrorWarnings is deprecated and will be removed in '
                       'nibabel v3.0; use nibabel.testing.error_warnings.',
@@ -27,6 +28,7 @@ def __init__(self, *args, **kwargs):


 class IgnoreWarnings(suppress_warnings):
+
     def __init__(self, *args, **kwargs):
         warnings.warn('IgnoreWarnings is deprecated and will be removed in '
                       'nibabel v3.0; use nibabel.testing.suppress_warnings.',

nibabel/data.py

Lines changed: 7 additions & 4 deletions
@@ -33,6 +33,7 @@ class BomberError(DataError, AttributeError):

 class Datasource(object):
     ''' Simple class to add base path to relative path '''
+
     def __init__(self, base_path):
         ''' Initialize datasource

@@ -87,7 +88,7 @@ def list_files(self, relative=True):
         out_list = list()
         for base, dirs, files in os.walk(self.base_path):
             if relative:
-                base = base[len(self.base_path)+1:]
+                base = base[len(self.base_path) + 1:]
             for filename in files:
                 out_list.append(pjoin(base, filename))
         return out_list
@@ -97,6 +98,7 @@ class VersionedDatasource(Datasource):
     ''' Datasource with version information in config file

     '''
+
     def __init__(self, base_path, config_filename=None):
         ''' Initialize versioned datasource

@@ -239,8 +241,8 @@ def find_data_dir(root_dirs, *names):
         if os.path.isdir(pth):
             return pth
     raise DataError('Could not find datasource "%s" in data path "%s"' %
-                        (ds_relative,
-                        os.path.pathsep.join(root_dirs)))
+                    (ds_relative,
+                     os.path.pathsep.join(root_dirs)))
@@ -304,6 +306,7 @@ def make_datasource(pkg_def, **kwargs):

 class Bomber(object):
     ''' Class to raise an informative error when used '''
+
     def __init__(self, name, msg):
         self.name = name
         self.msg = msg
@@ -350,7 +353,7 @@ def datasource_or_bomber(pkg_def, **options):
         return Bomber(sys_relpath, str(e))
     # check version
     if (version is None or
-        LooseVersion(ds.version) >= LooseVersion(version)):
+            LooseVersion(ds.version) >= LooseVersion(version)):
         return ds
     if 'name' in pkg_def:
         pkg_name = pkg_def['name']

nibabel/deprecated.py

Lines changed: 1 addition & 0 deletions
@@ -24,6 +24,7 @@ class ModuleProxy(object):
     when you do attribute access and return the attributes of the imported
     module.
     """
+
     def __init__(self, module_name):
         self._module_name = module_name

nibabel/dft.py

Lines changed: 8 additions & 3 deletions
@@ -57,10 +57,11 @@ def __init__(self, series, i, si):

     def __str__(self):
         fmt = 'expecting instance number %d, got %d'
-        return fmt % (self.i+1, self.si.instance_number)
+        return fmt % (self.i + 1, self.si.instance_number)


 class _Study(object):
+
     def __init__(self, d):
         self.uid = d['uid']
         self.date = d['date']
@@ -93,6 +94,7 @@ def patient_name_or_uid(self):


 class _Series(object):
+
     def __init__(self, d):
         self.uid = d['uid']
         self.study = d['study']
@@ -160,7 +162,7 @@ def as_nifti(self):
         for (i, si) in enumerate(self.storage_instances):
             if i + 1 != si.instance_number:
                 raise InstanceStackError(self, i, si)
-            logger.info('reading %d/%d' % (i+1, len(self.storage_instances)))
+            logger.info('reading %d/%d' % (i + 1, len(self.storage_instances)))
             d = self.storage_instances[i].dicom()
             data[i, :, :] = d.pixel_array

@@ -190,7 +192,7 @@ def as_nifti(self):
         m = ((pdi * cosi[0], pdj * cosj[0], pdk * cosk[0], pos_1[0]),
              (pdi * cosi[1], pdj * cosj[1], pdk * cosk[1], pos_1[1]),
              (pdi * cosi[2], pdj * cosj[2], pdk * cosk[2], pos_1[2]),
-             ( 0, 0, 0, 1))
+             (0, 0, 0, 1))

         m = numpy.array(m)

@@ -212,6 +214,7 @@ def nifti_size(self):


 class _StorageInstance(object):
+
     def __init__(self, d):
         self.uid = d['uid']
         self.instance_number = d['instance_number']
@@ -238,6 +241,7 @@ def dicom(self):

 class _db_nochange:
     """context guard for read-only database access"""
+
     def __enter__(self):
         self.c = DB.cursor()
         return self.c
@@ -251,6 +255,7 @@ def __exit__(self, type, value, traceback):

 class _db_change:
     """context guard for database access requiring a commit"""
+
     def __enter__(self):
         self.c = DB.cursor()
         return self.c

nibabel/ecat.py

Lines changed: 12 additions & 11 deletions
@@ -117,7 +117,7 @@
     ('data_units', '32S'),
     ('septa_state', np.uint16),
     ('fill', '12S')
-    ]
+]
 hdr_dtype = np.dtype(main_header_dtd)


@@ -384,7 +384,7 @@ def read_mlist(fileobj, endianness):
             mlist = []
             return mlist
         # Use all but first housekeeping row
-        mlists.append(rows[1:n_rows+1])
+        mlists.append(rows[1:n_rows + 1])
         mlist_index += n_rows
         if mlist_block_no <= 2:  # should block_no in (1, 2) be an error?
             break
@@ -467,7 +467,7 @@ def get_series_framenumbers(mlist):
     try:
         for frame_stored, (true_order, _) in frames_order.items():
             # frame as stored in file -> true number in series
-            frame_dict[frame_stored] = trueframenumbers[true_order]+1
+            frame_dict[frame_stored] = trueframenumbers[true_order] + 1
         return frame_dict
     except:
         raise IOError('Error in header or mlist order unknown')
@@ -571,7 +571,7 @@ def get_frame_affine(self, frame=0):

         dims = self.get_shape(frame)
         # get translations from center of image
-        origin_offset = (np.array(dims)-1) / 2.0
+        origin_offset = (np.array(dims) - 1) / 2.0
         aff = np.diag(zooms)
         aff[:3, -1] = -origin_offset * zooms[:-1] + np.array([x_off, y_off,
                                                               z_off])
@@ -664,6 +664,7 @@ class EcatImageArrayProxy(object):
     The array proxy allows us to freeze the passed fileobj and
     header such that it returns the expected data array.
     '''
+
     def __init__(self, subheader):
         self._subheader = subheader
         self._data = None
@@ -706,7 +707,7 @@ def __getitem__(self, sliceobj):
         slice3 = sliceobj[ax_inds[3]]
         # We will load volume by volume. Make slicer into volume by dropping
         # index over the volume axis
-        in_slicer = sliceobj[:ax_inds[3]] + sliceobj[ax_inds[3]+1:]
+        in_slicer = sliceobj[:ax_inds[3]] + sliceobj[ax_inds[3] + 1:]
         # int index for 4th axis, load one slice
         if isinstance(slice3, Integral):
             data = self._subheader.data_from_fileobj(frame_mapping[slice3][0])
@@ -868,16 +869,16 @@ def from_file_map(klass, file_map):
         hdr_fid = hdr_file.get_prepare_fileobj(mode='rb')
         header = klass._header.from_fileobj(hdr_fid)
         hdr_copy = header.copy()
-        ### LOAD MLIST
+        # LOAD MLIST
         mlist = np.zeros((header['num_frames'], 4), dtype=np.int32)
         mlist_data = read_mlist(hdr_fid, hdr_copy.endianness)
         mlist[:len(mlist_data)] = mlist_data
-        ### LOAD SUBHEADERS
+        # LOAD SUBHEADERS
         subheaders = klass._subheader(hdr_copy, mlist, hdr_fid)
-        ### LOAD DATA
-        ## Class level ImageArrayProxy
+        # LOAD DATA
+        # Class level ImageArrayProxy
         data = klass.ImageArrayProxy(subheaders)
-        ## Get affine
+        # Get affine
         if not subheaders._check_affines():
             warnings.warn('Affines different across frames, loading affine '
                           'from FIRST frame', UserWarning)
@@ -961,7 +962,7 @@ def to_file_map(self, file_map=None):
             image = self._subheader.raw_data_from_fileobj(index)

             # Write frame images
-            self._write_data(image, imgf, pos+2, endianness='>')
+            self._write_data(image, imgf, pos + 2, endianness='>')

             # Move to dictionnary offset and write dictionnary entry
             self._write_data(mlist[index], imgf, entry_pos, endianness='>')
