Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

STY: Apply ruff/Perflint rules (PERF) #3674

Merged
Merged 3 commits on Oct 6, 2024 (source and target branch names not captured in this page extract)
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 1 addition & 4 deletions nipype/algorithms/confounds.py
Original file line number Diff line number Diff line change
Expand Up @@ -1290,11 +1290,8 @@ def combine_mask_files(mask_files, mask_method=None, mask_index=None):
raise ValueError(
f"mask_index {mask_index} must be less than number of mask files {len(mask_files)}"
)
masks = []
if mask_method == "none":
for filename in mask_files:
masks.append(nb.load(filename))
return masks
return [nb.load(filename) for filename in mask_files]

if mask_method == "union":
mask = None
Expand Down
9 changes: 3 additions & 6 deletions nipype/algorithms/misc.py
Original file line number Diff line number Diff line change
Expand Up @@ -549,11 +549,9 @@ def maketypelist(rowheadings, shape, extraheadingBool, extraheading):
if rowheadings:
typelist.append(("heading", "a40"))
if len(shape) > 1:
for idx in range(1, (min(shape) + 1)):
typelist.append((str(idx), float))
typelist.extend((str(idx), float) for idx in range(1, (min(shape) + 1)))
else:
for idx in range(1, (shape[0] + 1)):
typelist.append((str(idx), float))
typelist.extend((str(idx), float) for idx in range(1, (shape[0] + 1)))
if extraheadingBool:
typelist.append((extraheading, "a40"))
iflogger.info(typelist)
Expand Down Expand Up @@ -714,8 +712,7 @@ def _run_interface(self, runtime):
mx = shape[0]
else:
mx = 1
for idx in range(mx):
extrafieldlist.append(self.inputs.extra_field)
extrafieldlist.extend(self.inputs.extra_field for idx in range(mx))
iflogger.info(len(extrafieldlist))
output[extraheading] = extrafieldlist
iflogger.info(output)
Expand Down
16 changes: 8 additions & 8 deletions nipype/algorithms/modelgen.py
Original file line number Diff line number Diff line change
Expand Up @@ -473,15 +473,15 @@ def _generate_design(self, infolist=None):
"""Generate design specification for a typical fmri paradigm"""
realignment_parameters = []
if isdefined(self.inputs.realignment_parameters):
for parfile in self.inputs.realignment_parameters:
realignment_parameters.append(
np.apply_along_axis(
func1d=normalize_mc_params,
axis=1,
arr=np.loadtxt(parfile),
source=self.inputs.parameter_source,
)
realignment_parameters.extend(
np.apply_along_axis(
func1d=normalize_mc_params,
axis=1,
arr=np.loadtxt(parfile),
source=self.inputs.parameter_source,
)
for parfile in self.inputs.realignment_parameters
)
outliers = []
if isdefined(self.inputs.outlier_files):
for filename in self.inputs.outlier_files:
Expand Down
12 changes: 6 additions & 6 deletions nipype/interfaces/ants/registration.py
Original file line number Diff line number Diff line change
Expand Up @@ -190,10 +190,11 @@ def _transformation_constructor(self):
delta_time = self.inputs.delta_time
symmetry_type = self.inputs.symmetry_type
retval = ["--transformation-model %s" % model]
parameters = []
for elem in (step_length, time_step, delta_time, symmetry_type):
if elem is not traits.Undefined:
parameters.append("%#.2g" % elem)
parameters = [
"%#.2g" % elem
for elem in (step_length, time_step, delta_time, symmetry_type)
if elem is not traits.Undefined
]
if len(parameters) > 0:
if len(parameters) > 1:
parameters = ",".join(parameters)
Expand Down Expand Up @@ -1134,8 +1135,7 @@ def _format_registration(self):
retval = []
for ii in range(len(self.inputs.transforms)):
retval.append("--transform %s" % (self._format_transform(ii)))
for metric in self._format_metric(ii):
retval.append("--metric %s" % metric)
retval.extend("--metric %s" % metric for metric in self._format_metric(ii))
retval.append("--convergence %s" % self._format_convergence(ii))
if isdefined(self.inputs.sigma_units):
retval.append(
Expand Down
31 changes: 14 additions & 17 deletions nipype/interfaces/ants/segmentation.py
Original file line number Diff line number Diff line change
Expand Up @@ -813,16 +813,15 @@ def _list_outputs(self):
os.getcwd(),
self.inputs.out_prefix + "BrainSegmentation0N4." + self.inputs.image_suffix,
)
posteriors = []
for i in range(len(self.inputs.segmentation_priors)):
posteriors.append(
os.path.join(
os.getcwd(),
self.inputs.out_prefix
+ "BrainSegmentationPosteriors%02d." % (i + 1)
+ self.inputs.image_suffix,
)
posteriors = [
os.path.join(
os.getcwd(),
self.inputs.out_prefix
+ "BrainSegmentationPosteriors%02d." % (i + 1)
+ self.inputs.image_suffix,
)
for i in range(len(self.inputs.segmentation_priors))
]
outputs["BrainSegmentationPosteriors"] = posteriors
outputs["CorticalThickness"] = os.path.join(
os.getcwd(),
Expand Down Expand Up @@ -1488,15 +1487,13 @@ class JointFusion(ANTSCommand):

def _format_arg(self, opt, spec, val):
if opt == "exclusion_image_label":
retval = []
for ii in range(len(self.inputs.exclusion_image_label)):
retval.append(
"-e {}[{}]".format(
self.inputs.exclusion_image_label[ii],
self.inputs.exclusion_image[ii],
)
return " ".join(
"-e {}[{}]".format(
self.inputs.exclusion_image_label[ii],
self.inputs.exclusion_image[ii],
)
return " ".join(retval)
for ii in range(len(self.inputs.exclusion_image_label))
)
if opt == "patch_radius":
return f"-p {self._format_xarray(val)}"
if opt == "search_radius":
Expand Down
9 changes: 5 additions & 4 deletions nipype/interfaces/base/core.py
Original file line number Diff line number Diff line change
Expand Up @@ -1047,10 +1047,11 @@ def __init__(self, check_import=True, *args, **kwargs):
if check_import:
import pkgutil

failed_imports = []
for pkg in (self._pkg,) + tuple(self.imports):
if pkgutil.find_loader(pkg) is None:
failed_imports.append(pkg)
failed_imports = [
pkg
for pkg in (self._pkg,) + tuple(self.imports)
if pkgutil.find_loader(pkg) is None
]
if failed_imports:
iflogger.warning(
"Unable to import %s; %s interface may fail to run",
Expand Down
10 changes: 5 additions & 5 deletions nipype/interfaces/cmtk/cmtk.py
Original file line number Diff line number Diff line change
Expand Up @@ -328,9 +328,11 @@ def cmat(
else:
final_fibers_indices = final_fibers_idx

for idx in final_fibers_indices:
finalfiberlength.extend(
# compute length of fiber
finalfiberlength.append(length(fib[idx][0]))
length(fib[idx][0])
for idx in final_fibers_indices
)

# convert to array
final_fiberlength_array = np.array(finalfiberlength)
Expand Down Expand Up @@ -463,9 +465,7 @@ def cmat(
def save_fibers(oldhdr, oldfib, fname, indices):
"""Stores a new trackvis file fname using only given indices"""
hdrnew = oldhdr.copy()
outstreams = []
for i in indices:
outstreams.append(oldfib[i])
outstreams = [oldfib[i] for i in indices]
n_fib_out = len(outstreams)
hdrnew["n_count"] = n_fib_out
iflogger.info("Writing final non-orphan fibers as %s", fname)
Expand Down
25 changes: 11 additions & 14 deletions nipype/interfaces/dipy/simulate.py
Original file line number Diff line number Diff line change
Expand Up @@ -222,20 +222,17 @@ def _run_interface(self, runtime):
mevals = [sf_evals] * nsticks + [[ba_evals[d]] * 3 for d in range(nballs)]

b0 = b0_im.get_fdata()[msk > 0]
args = []
for i in range(nvox):
args.append(
{
"fractions": fracs[i, ...].tolist(),
"sticks": [
tuple(dirs[i, j : j + 3]) for j in range(nsticks + nballs)
],
"gradients": gtab,
"mevals": mevals,
"S0": b0[i],
"snr": self.inputs.snr,
}
)
args = [
{
"fractions": fracs[i, ...].tolist(),
"sticks": [tuple(dirs[i, j : j + 3]) for j in range(nsticks + nballs)],
"gradients": gtab,
"mevals": mevals,
"S0": b0[i],
"snr": self.inputs.snr,
}
for i in range(nvox)
]

n_proc = self.inputs.n_proc
if n_proc == 0:
Expand Down
6 changes: 3 additions & 3 deletions nipype/interfaces/freesurfer/preprocess.py
Original file line number Diff line number Diff line change
Expand Up @@ -594,10 +594,10 @@ def _list_outputs(self):
raise Exception(
"Not taking frame manipulations into account- please warn the developers"
)
outfiles = []
outfile = self._get_outfilename()
for i in range(tp):
outfiles.append(fname_presuffix(outfile, suffix="%03d" % (i + 1)))
outfiles = [
fname_presuffix(outfile, suffix="%03d" % (i + 1)) for i in range(tp)
]
outfile = outfiles
outputs["out_file"] = outfile
return outputs
Expand Down
19 changes: 10 additions & 9 deletions nipype/interfaces/fsl/fix.py
Original file line number Diff line number Diff line change
Expand Up @@ -105,11 +105,11 @@ class TrainingSetCreator(BaseInterface):
_always_run = True

def _run_interface(self, runtime):
mel_icas = []
for item in self.inputs.mel_icas_in:
if os.path.exists(os.path.join(item, "hand_labels_noise.txt")):
mel_icas.append(item)

mel_icas = [
item
for item in self.inputs.mel_icas_in
if os.path.exists(os.path.join(item, "hand_labels_noise.txt"))
]
if len(mel_icas) == 0:
raise Exception(
"%s did not find any hand_labels_noise.txt files in the following directories: %s"
Expand All @@ -119,10 +119,11 @@ def _run_interface(self, runtime):
return runtime

def _list_outputs(self):
mel_icas = []
for item in self.inputs.mel_icas_in:
if os.path.exists(os.path.join(item, "hand_labels_noise.txt")):
mel_icas.append(item)
mel_icas = [
item
for item in self.inputs.mel_icas_in
if os.path.exists(os.path.join(item, "hand_labels_noise.txt"))
]
outputs = self._outputs().get()
outputs["mel_icas_out"] = mel_icas
return outputs
Expand Down
15 changes: 5 additions & 10 deletions nipype/interfaces/io.py
Original file line number Diff line number Diff line change
Expand Up @@ -959,10 +959,7 @@ def _list_outputs(self):
if isdefined(self.inputs.bucket_path):
template = os.path.join(self.inputs.bucket_path, template)
if not args:
filelist = []
for fname in bkt_files:
if re.match(template, fname):
filelist.append(fname)
filelist = [fname for fname in bkt_files if re.match(template, fname)]
if len(filelist) == 0:
msg = "Output key: {} Template: {} returned no files".format(
key,
Expand Down Expand Up @@ -2720,16 +2717,14 @@ class JSONFileGrabber(IOBase):
def _list_outputs(self):
import simplejson

outputs = {}
if isdefined(self.inputs.in_file):
with open(self.inputs.in_file) as f:
data = simplejson.load(f)
outputs = simplejson.load(f)

if not isinstance(data, dict):
if not isinstance(outputs, dict):
raise RuntimeError("JSON input has no dictionary structure")

for key, value in list(data.items()):
outputs[key] = value
else:
outputs = {}

if isdefined(self.inputs.defaults):
defaults = self.inputs.defaults
Expand Down
4 changes: 1 addition & 3 deletions nipype/interfaces/mne/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -132,9 +132,7 @@ def _list_outputs(self):
if val:
value_list = simplify_list(val)
if isinstance(value_list, list):
out_files = []
for value in value_list:
out_files.append(op.abspath(value))
out_files = [op.abspath(value) for value in value_list]
elif isinstance(value_list, (str, bytes)):
out_files = op.abspath(value_list)
else:
Expand Down
12 changes: 5 additions & 7 deletions nipype/interfaces/nilearn.py
Original file line number Diff line number Diff line change
Expand Up @@ -98,9 +98,7 @@ class SignalExtraction(NilearnBaseInterface, SimpleInterface):
def _run_interface(self, runtime):
maskers = self._process_inputs()

signals = []
for masker in maskers:
signals.append(masker.fit_transform(self.inputs.in_file))
signals = [masker.fit_transform(self.inputs.in_file) for masker in maskers]
region_signals = np.hstack(signals)

output = np.vstack((self.inputs.class_labels, region_signals.astype(str)))
Expand All @@ -127,10 +125,10 @@ def _process_inputs(self):
else: # 4d labels
n_labels = label_data.shape[3]
if self.inputs.incl_shared_variance: # independent computation
for img in nli.iter_img(label_data):
maskers.append(
nl.NiftiMapsMasker(self._4d(img.dataobj, img.affine))
)
maskers.extend(
nl.NiftiMapsMasker(self._4d(img.dataobj, img.affine))
for img in nli.iter_img(label_data)
)
else: # one computation fitting all
maskers.append(nl.NiftiMapsMasker(label_data))

Expand Down
5 changes: 1 addition & 4 deletions nipype/interfaces/nipy/model.py
Original file line number Diff line number Diff line change
Expand Up @@ -138,10 +138,7 @@ def _run_interface(self, runtime):
hpf = 0
drift_model = "Blank"

reg_names = []
for reg in session_info[0]["regress"]:
reg_names.append(reg["name"])

reg_names = [reg["name"] for reg in session_info[0]["regress"]]
reg_vals = np.zeros((nscans, len(reg_names)))
for i in range(len(reg_names)):
reg_vals[:, i] = np.array(session_info[0]["regress"][i]["val"]).reshape(
Expand Down
17 changes: 7 additions & 10 deletions nipype/pipeline/engine/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -1489,13 +1489,12 @@ def clean_working_directory(
files2remove.append(f)
else:
if not str2bool(config["execution"]["keep_inputs"]):
input_files = []
inputdict = inputs.trait_get()
input_files.extend(walk_outputs(inputdict))
input_files = [path for path, type in input_files if type == "f"]
for f in walk_files(cwd):
if f in input_files and f not in needed_files:
files2remove.append(f)
input_files = {
path for path, type in walk_outputs(inputs.trait_get()) if type == "f"
}
files2remove.extend(
f for f in walk_files(cwd) if f in input_files and f not in needed_files
)
logger.debug("Removing files: %s", ";".join(files2remove))
for f in files2remove:
os.remove(f)
Expand Down Expand Up @@ -1717,9 +1716,7 @@ def topological_sort(graph, depth_first=False):
components = nx.connected_components(G)
for desc in components:
group += 1
indices = []
for node in desc:
indices.append(nodesort.index(node))
indices = [nodesort.index(node) for node in desc]
nodes.extend(
np.array(nodesort)[np.array(indices)[np.argsort(indices)]].tolist()
)
Expand Down
Loading