STY: Apply ruff/flake8-simplify rules (SIM) #3676

Merged · 8 commits · Oct 6, 2024
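The flake8-simplify (SIM) rules implemented by ruff flag Python constructs that can be written more directly without changing behaviour. The hunks in this PR fall into a handful of recurring patterns: negated equality tests rewritten as inequalities (SIM201), membership tests against dict.keys() rewritten to test the mapping itself (SIM118), chained isinstance() calls merged into a single call with a tuple of types (SIM101), if/return True/return False collapsed into returning the condition (SIM103), and bare open()/close() pairs replaced with context managers (SIM115). Assuming a recent ruff release, a command along the lines of `ruff check --select SIM --fix nipype/` would report and auto-fix most of these, although the exact invocation the author used is not shown on this page. Short, hypothetical sketches of these patterns follow the relevant file sections below.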
8 changes: 4 additions & 4 deletions nipype/algorithms/misc.py
@@ -684,7 +684,7 @@ def _run_interface(self, runtime):

output_array = merge_csvs(self.inputs.in_files)
_, name, ext = split_filename(self.inputs.out_file)
if not ext == ".csv":
if ext != ".csv":
ext = ".csv"

out_file = op.abspath(name + ext)
@@ -725,7 +725,7 @@ def _run_interface(self, runtime):
def _list_outputs(self):
outputs = self.output_spec().get()
_, name, ext = split_filename(self.inputs.out_file)
if not ext == ".csv":
if ext != ".csv":
ext = ".csv"
out_file = op.abspath(name + ext)
outputs["csv_file"] = out_file
@@ -771,7 +771,7 @@ class AddCSVColumn(BaseInterface):
def _run_interface(self, runtime):
in_file = open(self.inputs.in_file)
_, name, ext = split_filename(self.inputs.out_file)
if not ext == ".csv":
if ext != ".csv":
ext = ".csv"
out_file = op.abspath(name + ext)

@@ -791,7 +791,7 @@ def _run_interface(self, runtime):
def _list_outputs(self):
outputs = self.output_spec().get()
_, name, ext = split_filename(self.inputs.out_file)
if not ext == ".csv":
if ext != ".csv":
ext = ".csv"
out_file = op.abspath(name + ext)
outputs["csv_file"] = out_file
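A minimal sketch of the SIM201 pattern fixed in nipype/algorithms/misc.py above; the helper name and the use of os.path.splitext are hypothetical and not part of nipype:

import os.path as op

def normalize_csv_name(out_file):
    """Force a .csv extension, mirroring the hunks above."""
    name, ext = op.splitext(out_file)
    # SIM201: direct inequality instead of `if not ext == ".csv":`
    if ext != ".csv":
        ext = ".csv"
    return op.abspath(name + ext)

print(normalize_csv_name("results.txt"))  # .../results.csv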
4 changes: 1 addition & 3 deletions nipype/interfaces/afni/base.py
@@ -326,6 +326,4 @@ def _cmd_prefix(self):

def no_afni():
"""Check whether AFNI is not available."""
if Info.version() is None:
return True
return False
return Info.version() is None
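The same SIM103 collapse as in afni/base.py, sketched with a stand-in for Info.version() (hypothetical code, not the nipype API):

def _tool_version():
    # Stand-in for Info.version(); returns None when the tool is not installed.
    return None

def no_tool():
    """Check whether the tool is not available."""
    # SIM103: return the condition itself instead of `if cond: return True / return False`.
    return _tool_version() is None

print(no_tool())  # True, because the stand-in reports no version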
4 changes: 2 additions & 2 deletions nipype/interfaces/afni/model.py
@@ -636,7 +636,7 @@ def _parse_inputs(self, skip=None):
def _list_outputs(self):
outputs = self.output_spec().get()

for key in outputs.keys():
for key in outputs:
if isdefined(self.inputs.get()[key]):
outputs[key] = os.path.abspath(self.inputs.get()[key])

@@ -722,7 +722,7 @@ class Synthesize(AFNICommand):
def _list_outputs(self):
outputs = self.output_spec().get()

for key in outputs.keys():
for key in outputs:
if isdefined(self.inputs.get()[key]):
outputs[key] = os.path.abspath(self.inputs.get()[key])

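Iterating a dict already yields its keys, so the .keys() call removed above is redundant. A small self-contained sketch (the output names are made up, not taken from the interface):

import os

outputs = {"out_file": "stats.nii", "cbucket": "cbucket.nii"}
# SIM118: iterate the mapping directly rather than calling .keys().
for key in outputs:  # before: for key in outputs.keys():
    outputs[key] = os.path.abspath(outputs[key])
print(outputs)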
2 changes: 1 addition & 1 deletion nipype/interfaces/afni/utils.py
@@ -234,7 +234,7 @@ def aggregate_outputs(self, runtime=None, needed_outputs=None):
m = re.search(pattern, line)
if m:
d = m.groupdict()
outputs.trait_set(**{k: int(d[k]) for k in d.keys()})
outputs.trait_set(**{k: int(v) for k, v in d.items()})
return outputs


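When both the key and the value are needed, .items() avoids a second lookup per key, which is what the utils.py hunk above does. An illustrative example with a made-up regex, not the one used in aggregate_outputs:

import re

m = re.search(r"(?P<rows>\d+)x(?P<cols>\d+)", "matrix is 12x34")
d = m.groupdict()
# Before: {k: int(d[k]) for k in d.keys()}  -- one extra lookup per key.
converted = {k: int(v) for k, v in d.items()}
print(converted)  # {'rows': 12, 'cols': 34}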
10 changes: 3 additions & 7 deletions nipype/interfaces/base/specs.py
@@ -177,19 +177,15 @@ def get_traitsfree(self, **kwargs):

def _clean_container(self, objekt, undefinedval=None, skipundefined=False):
"""Convert a traited object into a pure python representation."""
if isinstance(objekt, TraitDictObject) or isinstance(objekt, dict):
if isinstance(objekt, (TraitDictObject, dict)):
out = {}
for key, val in list(objekt.items()):
if isdefined(val):
out[key] = self._clean_container(val, undefinedval)
else:
if not skipundefined:
out[key] = undefinedval
elif (
isinstance(objekt, TraitListObject)
or isinstance(objekt, list)
or isinstance(objekt, tuple)
):
elif isinstance(objekt, (TraitListObject, list, tuple)):
out = []
for val in objekt:
if isdefined(val):
@@ -387,7 +383,7 @@ def __deepcopy__(self, memo):
dup_dict = deepcopy(self.trait_get(), memo)
# access all keys
for key in self.copyable_trait_names():
if key in self.__dict__.keys():
if key in self.__dict__:
_ = getattr(self, key)
# clone once
dup = self.clone_traits(memo=memo)
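isinstance() accepts a tuple of types, which is what the specs.py change relies on. A toy version of the container-cleaning logic, using plain built-ins in place of the traits classes:

from collections import OrderedDict

def clean(objekt):
    """Recursively convert mappings and sequences to plain dicts and lists."""
    # SIM101: one isinstance() call with a tuple of types replaces a chain
    # of `isinstance(x, A) or isinstance(x, B) or isinstance(x, C)`.
    if isinstance(objekt, (OrderedDict, dict)):
        return {key: clean(val) for key, val in objekt.items()}
    elif isinstance(objekt, (list, tuple)):
        return [clean(val) for val in objekt]
    return objekt

print(clean({"a": (1, 2), "b": [3, {"c": 4}]}))  # {'a': [1, 2], 'b': [3, {'c': 4}]}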
12 changes: 6 additions & 6 deletions nipype/interfaces/cmtk/cmtk.py
@@ -77,7 +77,7 @@ def get_rois_crossed(pointsmm, roiData, voxelSize):
x = int(pointsmm[j, 0] / float(voxelSize[0]))
y = int(pointsmm[j, 1] / float(voxelSize[1]))
z = int(pointsmm[j, 2] / float(voxelSize[2]))
if not roiData[x, y, z] == 0:
if roiData[x, y, z] != 0:
rois_crossed.append(roiData[x, y, z])
rois_crossed = list(
dict.fromkeys(rois_crossed).keys()
@@ -91,7 +91,7 @@ def get_connectivity_matrix(n_rois, list_of_roi_crossed_lists):
for idx_i, roi_i in enumerate(rois_crossed):
for idx_j, roi_j in enumerate(rois_crossed):
if idx_i > idx_j:
if not roi_i == roi_j:
if roi_i != roi_j:
connectivity_matrix[roi_i - 1, roi_j - 1] += 1
connectivity_matrix = connectivity_matrix + connectivity_matrix.T
return connectivity_matrix
@@ -371,7 +371,7 @@ def cmat(
di["fiber_length_mean"] = 0
di["fiber_length_median"] = 0
di["fiber_length_std"] = 0
if not u == v: # Fix for self loop problem
if u != v: # Fix for self loop problem
G.add_edge(u, v, **di)
if "fiblist" in d:
numfib.add_edge(u, v, weight=di["number_of_fibers"])
@@ -400,7 +400,7 @@ def cmat(
pickle.dump(I, f, pickle.HIGHEST_PROTOCOL)

path, name, ext = split_filename(matrix_mat_name)
if not ext == ".mat":
if ext != ".mat":
ext = ".mat"
matrix_mat_name = matrix_mat_name + ext

@@ -608,7 +608,7 @@ def _run_interface(self, runtime):

matrix_mat_file = op.abspath(self.inputs.out_matrix_mat_file)
path, name, ext = split_filename(matrix_mat_file)
if not ext == ".mat":
if ext != ".mat":
ext = ".mat"
matrix_mat_file = matrix_mat_file + ext

@@ -673,7 +673,7 @@ def _list_outputs(self):

matrix_mat_file = op.abspath(self.inputs.out_matrix_mat_file)
path, name, ext = split_filename(matrix_mat_file)
if not ext == ".mat":
if ext != ".mat":
ext = ".mat"
matrix_mat_file = matrix_mat_file + ext

14 changes: 7 additions & 7 deletions nipype/interfaces/cmtk/convert.py
@@ -194,17 +194,17 @@ def _run_interface(self, runtime):
for data in self.inputs.data_files:
_, data_name, _ = split_filename(data)
cda = cf.CData(name=data_name, src=data, fileformat="NumPy")
if not string.find(data_name, "lengths") == -1:
if 'lengths' in data_name:
cda.dtype = "FinalFiberLengthArray"
if not string.find(data_name, "endpoints") == -1:
if 'endpoints' in data_name:
cda.dtype = "FiberEndpoints"
if not string.find(data_name, "labels") == -1:
if 'labels' in data_name:
cda.dtype = "FinalFiberLabels"
a.add_connectome_data(cda)

a.print_summary()
_, name, ext = split_filename(self.inputs.out_file)
if not ext == ".cff":
if ext != '.cff':
ext = ".cff"
cf.save_to_cff(a, op.abspath(name + ext))

@@ -213,7 +213,7 @@ def _run_interface(self, runtime):
def _list_outputs(self):
outputs = self._outputs().get()
_, name, ext = split_filename(self.inputs.out_file)
if not ext == ".cff":
if ext != '.cff':
ext = ".cff"
outputs["connectome_file"] = op.abspath(name + ext)
return outputs
@@ -281,7 +281,7 @@ def _run_interface(self, runtime):
metadata.set_email("My Email")

_, name, ext = split_filename(self.inputs.out_file)
if not ext == ".cff":
if ext != '.cff':
ext = ".cff"
cf.save_to_cff(newcon, op.abspath(name + ext))

@@ -290,7 +290,7 @@ def _run_interface(self, runtime):
def _list_outputs(self):
outputs = self._outputs().get()
_, name, ext = split_filename(self.inputs.out_file)
if not ext == ".cff":
if ext != '.cff':
ext = ".cff"
outputs["connectome_file"] = op.abspath(name + ext)
return outputs
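The convert.py hunks also retire the Python 2 string.find(...) == -1 idiom; the in operator performs the same substring test and reads directly. A standalone sketch with invented data names, not actual connectome files:

data_names = ["fiber_lengths", "fiber_endpoints", "fiber_labels"]
for data_name in data_names:
    # Before: if not string.find(data_name, "lengths") == -1:
    if "lengths" in data_name:
        print(data_name, "-> FinalFiberLengthArray")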
12 changes: 6 additions & 6 deletions nipype/interfaces/cmtk/nx.py
@@ -166,8 +166,8 @@ def average_networks(in_files, ntwk_res_file, group_id):
for edge in edges:
data = ntwk.edge[edge[0]][edge[1]]
if ntwk.edge[edge[0]][edge[1]]["count"] >= count_to_keep_edge:
for key in list(data.keys()):
if not key == "count":
for key in data:
if key != "count":
data[key] = data[key] / len(in_files)
ntwk.edge[edge[0]][edge[1]] = data
avg_ntwk.add_edge(edge[0], edge[1], **data)
@@ -183,8 +183,8 @@ def average_networks(in_files, ntwk_res_file, group_id):
avg_edges = avg_ntwk.edges()
for edge in avg_edges:
data = avg_ntwk.edge[edge[0]][edge[1]]
for key in list(data.keys()):
if not key == "count":
for key in data:
if key != "count":
edge_dict[key] = np.zeros(
(avg_ntwk.number_of_nodes(), avg_ntwk.number_of_nodes())
)
@@ -342,7 +342,7 @@ def add_node_data(node_array, ntwk):
node_ntwk = nx.Graph()
newdata = {}
for idx, data in ntwk.nodes(data=True):
if not int(idx) == 0:
if int(idx) != 0:
newdata["value"] = node_array[int(idx) - 1]
data.update(newdata)
node_ntwk.add_node(int(idx), **data)
@@ -354,7 +354,7 @@ def add_edge_data(edge_array, ntwk, above=0, below=0):
data = {}
for x, row in enumerate(edge_array):
for y in range(np.max(np.shape(edge_array[x]))):
if not edge_array[x, y] == 0:
if edge_array[x, y] != 0:
data["value"] = edge_array[x, y]
if data["value"] <= below or data["value"] >= above:
if edge_ntwk.has_edge(x + 1, y + 1):
2 changes: 1 addition & 1 deletion nipype/interfaces/dcmstack.py
@@ -152,7 +152,7 @@ def _run_interface(self, runtime):
meta_filter = dcmstack.make_key_regex_filter(exclude_regexes, include_regexes)
stack = dcmstack.DicomStack(meta_filter=meta_filter)
for src_path in src_paths:
if not imghdr.what(src_path) == "gif":
if imghdr.what(src_path) != "gif":
src_dcm = pydicom.dcmread(src_path, force=self.inputs.force_read)
stack.add_dcm(src_dcm)
nii = stack.to_nifti(embed_meta=True)
16 changes: 8 additions & 8 deletions nipype/interfaces/dipy/tests/test_base.py
@@ -109,10 +109,10 @@ def test_create_interface_specs():
assert new_interface.__name__ == "MyInterface"
current_params = new_interface().get()
assert len(current_params) == 4
assert "params1" in current_params.keys()
assert "params2_files" in current_params.keys()
assert "params3" in current_params.keys()
assert "out_params" in current_params.keys()
assert "params1" in current_params
assert "params2_files" in current_params
assert "params3" in current_params
assert "out_params" in current_params


@pytest.mark.skipif(
@@ -184,10 +184,10 @@ def run(self, in_files, param1=1, out_dir="", out_ref="out1.txt"):
params_in = new_specs().inputs.get()
params_out = new_specs()._outputs().get()
assert len(params_in) == 4
assert "in_files" in params_in.keys()
assert "param1" in params_in.keys()
assert "out_dir" in params_out.keys()
assert "out_ref" in params_out.keys()
assert "in_files" in params_in
assert "param1" in params_in
assert "out_dir" in params_out
assert "out_ref" in params_out

with pytest.raises(ValueError):
new_specs().run()
5 changes: 1 addition & 4 deletions nipype/interfaces/freesurfer/base.py
@@ -269,7 +269,4 @@ def no_freesurfer():
used with skipif to skip tests that will
fail if FreeSurfer is not installed"""

if Info.version() is None:
return True
else:
return False
return Info.version() is None
5 changes: 1 addition & 4 deletions nipype/interfaces/fsl/base.py
@@ -262,10 +262,7 @@ def no_fsl():
used with skipif to skip tests that will
fail if FSL is not installed"""

if Info.version() is None:
return True
else:
return False
return Info.version() is None


def no_fsl_course_data():
35 changes: 15 additions & 20 deletions nipype/interfaces/fsl/model.py
@@ -142,13 +142,12 @@
output_spec = Level1DesignOutputSpec

def _create_ev_file(self, evfname, evinfo):
f = open(evfname, "w")
for i in evinfo:
if len(i) == 3:
f.write(f"{i[0]:f} {i[1]:f} {i[2]:f}\n")
else:
f.write("%f\n" % i[0])
f.close()
with open(evfname, "w") as f:
for i in evinfo:
if len(i) == 3:
f.write(f"{i[0]:f} {i[1]:f} {i[2]:f}\n")
else:
f.write("%f\n" % i[0])

Codecov / codecov/patch warning: added line nipype/interfaces/fsl/model.py#L150 was not covered by tests

def _create_ev_files(
self,
@@ -319,7 +318,7 @@

for fconidx in ftest_idx:
fval = 0
if con[0] in con_map.keys() and fconidx in con_map[con[0]]:
if con[0] in con_map and fconidx in con_map[con[0]]:
fval = 1
ev_txt += contrast_ftest_element.substitute(
cnum=ftest_idx.index(fconidx) + 1,
@@ -403,9 +402,8 @@
fsf_txt += cond_txt
fsf_txt += fsf_postscript.substitute(overwrite=1)

f = open(os.path.join(cwd, "run%d.fsf" % i), "w")
f.write(fsf_txt)
f.close()
with open(os.path.join(cwd, "run%d.fsf" % i), "w") as f:
f.write(fsf_txt)

Codecov / codecov/patch warning: added line nipype/interfaces/fsl/model.py#L406 was not covered by tests

return runtime

@@ -946,9 +944,8 @@
for i, rundir in enumerate(ensure_list(self.inputs.feat_dirs)):
fsf_txt += fsf_dirs.substitute(runno=i + 1, rundir=os.path.abspath(rundir))
fsf_txt += fsf_footer.substitute()
f = open(os.path.join(os.getcwd(), "register.fsf"), "w")
f.write(fsf_txt)
f.close()
with open(os.path.join(os.getcwd(), "register.fsf"), "w") as f:
f.write(fsf_txt)

Codecov / codecov/patch warning: added line nipype/interfaces/fsl/model.py#L948 was not covered by tests

return runtime

@@ -1414,9 +1411,8 @@

# write design files
for i, name in enumerate(["design.mat", "design.con", "design.grp"]):
f = open(os.path.join(cwd, name), "w")
f.write(txt[name])
f.close()
with open(os.path.join(cwd, name), "w") as f:
f.write(txt[name])

Codecov / codecov/patch warning: added line nipype/interfaces/fsl/model.py#L1415 was not covered by tests

return runtime

@@ -1583,9 +1579,8 @@
if ("fts" in key) and (nfcons == 0):
continue
filename = key.replace("_", ".")
f = open(os.path.join(cwd, filename), "w")
f.write(val)
f.close()
with open(os.path.join(cwd, filename), "w") as f:
f.write(val)

Codecov / codecov/patch warning: added line nipype/interfaces/fsl/model.py#L1583 was not covered by tests

return runtime

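A with block closes the file even if a write raises, which the open()/write()/close() sequences replaced above did not guarantee. A self-contained sketch writing to a temporary location (the file name and contents are illustrative):

import os
import tempfile

fsf_txt = "# design file contents\n"
out_path = os.path.join(tempfile.gettempdir(), "run1.fsf")
# SIM115: the context manager closes the handle on success and on error,
# replacing the explicit open()/write()/close() sequence.
with open(out_path, "w") as f:
    f.write(fsf_txt)
print("wrote", out_path)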
2 changes: 1 addition & 1 deletion nipype/interfaces/fsl/tests/test_base.py
@@ -79,7 +79,7 @@ def test_gen_fname(args, desired_name):
cmd = fsl.FSLCommand(command="junk", output_type="NIFTI_GZ")
pth = os.getcwd()
fname = cmd._gen_fname("foo.nii.gz", **args)
if "dir" in desired_name.keys():
if "dir" in desired_name:
desired = os.path.join(desired_name["dir"], desired_name["file"])
else:
desired = os.path.join(pth, desired_name["file"])