diff --git a/nipype/algorithms/confounds.py b/nipype/algorithms/confounds.py
index 5bd5a065e1..157d1e48d7 100644
--- a/nipype/algorithms/confounds.py
+++ b/nipype/algorithms/confounds.py
@@ -1290,11 +1290,8 @@ def combine_mask_files(mask_files, mask_method=None, mask_index=None):
         raise ValueError(
             f"mask_index {mask_index} must be less than number of mask files {len(mask_files)}"
         )
-    masks = []
     if mask_method == "none":
-        for filename in mask_files:
-            masks.append(nb.load(filename))
-        return masks
+        return [nb.load(filename) for filename in mask_files]
 
     if mask_method == "union":
         mask = None
diff --git a/nipype/algorithms/misc.py b/nipype/algorithms/misc.py
index edc750f387..b7bbd3ce21 100644
--- a/nipype/algorithms/misc.py
+++ b/nipype/algorithms/misc.py
@@ -549,11 +549,9 @@ def maketypelist(rowheadings, shape, extraheadingBool, extraheading):
     if rowheadings:
         typelist.append(("heading", "a40"))
     if len(shape) > 1:
-        for idx in range(1, (min(shape) + 1)):
-            typelist.append((str(idx), float))
+        typelist.extend((str(idx), float) for idx in range(1, (min(shape) + 1)))
     else:
-        for idx in range(1, (shape[0] + 1)):
-            typelist.append((str(idx), float))
+        typelist.extend((str(idx), float) for idx in range(1, (shape[0] + 1)))
     if extraheadingBool:
         typelist.append((extraheading, "a40"))
     iflogger.info(typelist)
@@ -714,8 +712,7 @@ def _run_interface(self, runtime):
                 mx = shape[0]
             else:
                 mx = 1
-            for idx in range(mx):
-                extrafieldlist.append(self.inputs.extra_field)
+            extrafieldlist.extend(self.inputs.extra_field for idx in range(mx))
             iflogger.info(len(extrafieldlist))
             output[extraheading] = extrafieldlist
             iflogger.info(output)
diff --git a/nipype/algorithms/modelgen.py b/nipype/algorithms/modelgen.py
index 8487ac0264..17e2d2f65f 100644
--- a/nipype/algorithms/modelgen.py
+++ b/nipype/algorithms/modelgen.py
@@ -473,15 +473,15 @@ def _generate_design(self, infolist=None):
         """Generate design specification for a typical fmri paradigm"""
         realignment_parameters = []
         if isdefined(self.inputs.realignment_parameters):
-            for parfile in self.inputs.realignment_parameters:
-                realignment_parameters.append(
-                    np.apply_along_axis(
-                        func1d=normalize_mc_params,
-                        axis=1,
-                        arr=np.loadtxt(parfile),
-                        source=self.inputs.parameter_source,
-                    )
+            realignment_parameters.extend(
+                np.apply_along_axis(
+                    func1d=normalize_mc_params,
+                    axis=1,
+                    arr=np.loadtxt(parfile),
+                    source=self.inputs.parameter_source,
                 )
+                for parfile in self.inputs.realignment_parameters
+            )
         outliers = []
         if isdefined(self.inputs.outlier_files):
             for filename in self.inputs.outlier_files:
diff --git a/nipype/interfaces/ants/registration.py b/nipype/interfaces/ants/registration.py
index 6803eb94b9..41037ffc5f 100644
--- a/nipype/interfaces/ants/registration.py
+++ b/nipype/interfaces/ants/registration.py
@@ -190,10 +190,11 @@ def _transformation_constructor(self):
         delta_time = self.inputs.delta_time
         symmetry_type = self.inputs.symmetry_type
         retval = ["--transformation-model %s" % model]
-        parameters = []
-        for elem in (step_length, time_step, delta_time, symmetry_type):
-            if elem is not traits.Undefined:
-                parameters.append("%#.2g" % elem)
+        parameters = [
+            "%#.2g" % elem
+            for elem in (step_length, time_step, delta_time, symmetry_type)
+            if elem is not traits.Undefined
+        ]
         if len(parameters) > 0:
             if len(parameters) > 1:
                 parameters = ",".join(parameters)
@@ -1134,8 +1135,7 @@ def _format_registration(self):
         retval = []
         for ii in range(len(self.inputs.transforms)):
             retval.append("--transform %s" % (self._format_transform(ii)))
-            for metric in self._format_metric(ii):
-                retval.append("--metric %s" % metric)
+            retval.extend("--metric %s" % metric for metric in self._format_metric(ii))
             retval.append("--convergence %s" % self._format_convergence(ii))
             if isdefined(self.inputs.sigma_units):
                 retval.append(
diff --git a/nipype/interfaces/ants/segmentation.py b/nipype/interfaces/ants/segmentation.py
index d486d0b4af..f4b60ec11c 100644
--- a/nipype/interfaces/ants/segmentation.py
+++ b/nipype/interfaces/ants/segmentation.py
@@ -813,16 +813,15 @@ def _list_outputs(self):
             os.getcwd(),
             self.inputs.out_prefix + "BrainSegmentation0N4." + self.inputs.image_suffix,
         )
-        posteriors = []
-        for i in range(len(self.inputs.segmentation_priors)):
-            posteriors.append(
-                os.path.join(
-                    os.getcwd(),
-                    self.inputs.out_prefix
-                    + "BrainSegmentationPosteriors%02d." % (i + 1)
-                    + self.inputs.image_suffix,
-                )
+        posteriors = [
+            os.path.join(
+                os.getcwd(),
+                self.inputs.out_prefix
+                + "BrainSegmentationPosteriors%02d." % (i + 1)
+                + self.inputs.image_suffix,
             )
+            for i in range(len(self.inputs.segmentation_priors))
+        ]
         outputs["BrainSegmentationPosteriors"] = posteriors
         outputs["CorticalThickness"] = os.path.join(
             os.getcwd(),
@@ -1488,15 +1487,13 @@ class JointFusion(ANTSCommand):
 
     def _format_arg(self, opt, spec, val):
         if opt == "exclusion_image_label":
-            retval = []
-            for ii in range(len(self.inputs.exclusion_image_label)):
-                retval.append(
-                    "-e {}[{}]".format(
-                        self.inputs.exclusion_image_label[ii],
-                        self.inputs.exclusion_image[ii],
-                    )
+            return " ".join(
+                "-e {}[{}]".format(
+                    self.inputs.exclusion_image_label[ii],
+                    self.inputs.exclusion_image[ii],
                 )
-            return " ".join(retval)
+                for ii in range(len(self.inputs.exclusion_image_label))
+            )
         if opt == "patch_radius":
             return f"-p {self._format_xarray(val)}"
         if opt == "search_radius":
diff --git a/nipype/interfaces/base/core.py b/nipype/interfaces/base/core.py
index 3e17893aef..02c4a1ab7e 100644
--- a/nipype/interfaces/base/core.py
+++ b/nipype/interfaces/base/core.py
@@ -1047,10 +1047,11 @@ def __init__(self, check_import=True, *args, **kwargs):
         if check_import:
             import pkgutil
 
-            failed_imports = []
-            for pkg in (self._pkg,) + tuple(self.imports):
-                if pkgutil.find_loader(pkg) is None:
-                    failed_imports.append(pkg)
+            failed_imports = [
+                pkg
+                for pkg in (self._pkg,) + tuple(self.imports)
+                if pkgutil.find_loader(pkg) is None
+            ]
             if failed_imports:
                 iflogger.warning(
                     "Unable to import %s; %s interface may fail to run",
diff --git a/nipype/interfaces/cmtk/cmtk.py b/nipype/interfaces/cmtk/cmtk.py
index 9bc3a36d6d..baf1bccaae 100644
--- a/nipype/interfaces/cmtk/cmtk.py
+++ b/nipype/interfaces/cmtk/cmtk.py
@@ -328,9 +328,11 @@ def cmat(
         else:
             final_fibers_indices = final_fibers_idx
 
-        for idx in final_fibers_indices:
+        finalfiberlength.extend(
             # compute length of fiber
-            finalfiberlength.append(length(fib[idx][0]))
+            length(fib[idx][0])
+            for idx in final_fibers_indices
+        )
 
         # convert to array
         final_fiberlength_array = np.array(finalfiberlength)
@@ -463,9 +465,7 @@ def cmat(
 def save_fibers(oldhdr, oldfib, fname, indices):
     """Stores a new trackvis file fname using only given indices"""
     hdrnew = oldhdr.copy()
-    outstreams = []
-    for i in indices:
-        outstreams.append(oldfib[i])
+    outstreams = [oldfib[i] for i in indices]
     n_fib_out = len(outstreams)
     hdrnew["n_count"] = n_fib_out
     iflogger.info("Writing final non-orphan fibers as %s", fname)
diff --git a/nipype/interfaces/dipy/simulate.py b/nipype/interfaces/dipy/simulate.py
index 24681b6e93..3fc812e365 100644
--- a/nipype/interfaces/dipy/simulate.py
+++ b/nipype/interfaces/dipy/simulate.py
@@ -222,20 +222,17 @@ def _run_interface(self, runtime):
         mevals = [sf_evals] * nsticks + [[ba_evals[d]] * 3 for d in range(nballs)]
 
         b0 = b0_im.get_fdata()[msk > 0]
-        args = []
-        for i in range(nvox):
-            args.append(
-                {
-                    "fractions": fracs[i, ...].tolist(),
-                    "sticks": [
-                        tuple(dirs[i, j : j + 3]) for j in range(nsticks + nballs)
-                    ],
-                    "gradients": gtab,
-                    "mevals": mevals,
-                    "S0": b0[i],
-                    "snr": self.inputs.snr,
-                }
-            )
+        args = [
+            {
+                "fractions": fracs[i, ...].tolist(),
+                "sticks": [tuple(dirs[i, j : j + 3]) for j in range(nsticks + nballs)],
+                "gradients": gtab,
+                "mevals": mevals,
+                "S0": b0[i],
+                "snr": self.inputs.snr,
+            }
+            for i in range(nvox)
+        ]
 
         n_proc = self.inputs.n_proc
         if n_proc == 0:
diff --git a/nipype/interfaces/freesurfer/preprocess.py b/nipype/interfaces/freesurfer/preprocess.py
index a2acacb765..80b8703c31 100644
--- a/nipype/interfaces/freesurfer/preprocess.py
+++ b/nipype/interfaces/freesurfer/preprocess.py
@@ -594,10 +594,10 @@ def _list_outputs(self):
                 raise Exception(
                     "Not taking frame manipulations into account- please warn the developers"
                 )
-            outfiles = []
             outfile = self._get_outfilename()
-            for i in range(tp):
-                outfiles.append(fname_presuffix(outfile, suffix="%03d" % (i + 1)))
+            outfiles = [
+                fname_presuffix(outfile, suffix="%03d" % (i + 1)) for i in range(tp)
+            ]
             outfile = outfiles
         outputs["out_file"] = outfile
         return outputs
diff --git a/nipype/interfaces/fsl/fix.py b/nipype/interfaces/fsl/fix.py
index 503355d4b1..2799c53104 100644
--- a/nipype/interfaces/fsl/fix.py
+++ b/nipype/interfaces/fsl/fix.py
@@ -105,11 +105,11 @@ class TrainingSetCreator(BaseInterface):
     _always_run = True
 
     def _run_interface(self, runtime):
-        mel_icas = []
-        for item in self.inputs.mel_icas_in:
-            if os.path.exists(os.path.join(item, "hand_labels_noise.txt")):
-                mel_icas.append(item)
-
+        mel_icas = [
+            item
+            for item in self.inputs.mel_icas_in
+            if os.path.exists(os.path.join(item, "hand_labels_noise.txt"))
+        ]
         if len(mel_icas) == 0:
             raise Exception(
                 "%s did not find any hand_labels_noise.txt files in the following directories: %s"
@@ -119,10 +119,11 @@ def _run_interface(self, runtime):
         return runtime
 
     def _list_outputs(self):
-        mel_icas = []
-        for item in self.inputs.mel_icas_in:
-            if os.path.exists(os.path.join(item, "hand_labels_noise.txt")):
-                mel_icas.append(item)
+        mel_icas = [
+            item
+            for item in self.inputs.mel_icas_in
+            if os.path.exists(os.path.join(item, "hand_labels_noise.txt"))
+        ]
         outputs = self._outputs().get()
         outputs["mel_icas_out"] = mel_icas
         return outputs
diff --git a/nipype/interfaces/io.py b/nipype/interfaces/io.py
index 4ed411b1b5..34dbc1b461 100644
--- a/nipype/interfaces/io.py
+++ b/nipype/interfaces/io.py
@@ -959,10 +959,7 @@ def _list_outputs(self):
                 if isdefined(self.inputs.bucket_path):
                     template = os.path.join(self.inputs.bucket_path, template)
                 if not args:
-                    filelist = []
-                    for fname in bkt_files:
-                        if re.match(template, fname):
-                            filelist.append(fname)
+                    filelist = [fname for fname in bkt_files if re.match(template, fname)]
                     if len(filelist) == 0:
                         msg = "Output key: {} Template: {} returned no files".format(
                             key,
@@ -2720,16 +2717,14 @@ class JSONFileGrabber(IOBase):
     def _list_outputs(self):
         import simplejson
 
-        outputs = {}
         if isdefined(self.inputs.in_file):
             with open(self.inputs.in_file) as f:
-                data = simplejson.load(f)
+                outputs = simplejson.load(f)
 
-            if not isinstance(data, dict):
+            if not isinstance(outputs, dict):
                 raise RuntimeError("JSON input has no dictionary structure")
-
-            for key, value in list(data.items()):
-                outputs[key] = value
+        else:
+            outputs = {}
 
         if isdefined(self.inputs.defaults):
             defaults = self.inputs.defaults
diff --git a/nipype/interfaces/mne/base.py b/nipype/interfaces/mne/base.py
index 39d23e253c..6e9b766305 100644
--- a/nipype/interfaces/mne/base.py
+++ b/nipype/interfaces/mne/base.py
@@ -132,9 +132,7 @@ def _list_outputs(self):
             if val:
                 value_list = simplify_list(val)
                 if isinstance(value_list, list):
-                    out_files = []
-                    for value in value_list:
-                        out_files.append(op.abspath(value))
+                    out_files = [op.abspath(value) for value in value_list]
                 elif isinstance(value_list, (str, bytes)):
                     out_files = op.abspath(value_list)
                 else:
diff --git a/nipype/interfaces/nilearn.py b/nipype/interfaces/nilearn.py
index e73fe7572e..9d78517f79 100644
--- a/nipype/interfaces/nilearn.py
+++ b/nipype/interfaces/nilearn.py
@@ -98,9 +98,7 @@ class SignalExtraction(NilearnBaseInterface, SimpleInterface):
     def _run_interface(self, runtime):
         maskers = self._process_inputs()
 
-        signals = []
-        for masker in maskers:
-            signals.append(masker.fit_transform(self.inputs.in_file))
+        signals = [masker.fit_transform(self.inputs.in_file) for masker in maskers]
         region_signals = np.hstack(signals)
 
         output = np.vstack((self.inputs.class_labels, region_signals.astype(str)))
@@ -127,10 +125,10 @@ def _process_inputs(self):
             else:  # 4d labels
                 n_labels = label_data.shape[3]
                 if self.inputs.incl_shared_variance:  # independent computation
-                    for img in nli.iter_img(label_data):
-                        maskers.append(
-                            nl.NiftiMapsMasker(self._4d(img.dataobj, img.affine))
-                        )
+                    maskers.extend(
+                        nl.NiftiMapsMasker(self._4d(img.dataobj, img.affine))
+                        for img in nli.iter_img(label_data)
+                    )
                 else:  # one computation fitting all
                     maskers.append(nl.NiftiMapsMasker(label_data))
 
diff --git a/nipype/interfaces/nipy/model.py b/nipype/interfaces/nipy/model.py
index 4d474b199e..c99a4acaea 100644
--- a/nipype/interfaces/nipy/model.py
+++ b/nipype/interfaces/nipy/model.py
@@ -138,10 +138,7 @@ def _run_interface(self, runtime):
             hpf = 0
             drift_model = "Blank"
 
-        reg_names = []
-        for reg in session_info[0]["regress"]:
-            reg_names.append(reg["name"])
-
+        reg_names = [reg["name"] for reg in session_info[0]["regress"]]
         reg_vals = np.zeros((nscans, len(reg_names)))
         for i in range(len(reg_names)):
             reg_vals[:, i] = np.array(session_info[0]["regress"][i]["val"]).reshape(
diff --git a/nipype/pipeline/engine/utils.py b/nipype/pipeline/engine/utils.py
index b52fdcf25e..3601290cd9 100644
--- a/nipype/pipeline/engine/utils.py
+++ b/nipype/pipeline/engine/utils.py
@@ -1489,13 +1489,12 @@ def clean_working_directory(
                     files2remove.append(f)
     else:
         if not str2bool(config["execution"]["keep_inputs"]):
-            input_files = []
-            inputdict = inputs.trait_get()
-            input_files.extend(walk_outputs(inputdict))
-            input_files = [path for path, type in input_files if type == "f"]
-            for f in walk_files(cwd):
-                if f in input_files and f not in needed_files:
-                    files2remove.append(f)
+            input_files = {
+                path for path, type in walk_outputs(inputs.trait_get()) if type == "f"
+            }
+            files2remove.extend(
+                f for f in walk_files(cwd) if f in input_files and f not in needed_files
+            )
     logger.debug("Removing files: %s", ";".join(files2remove))
     for f in files2remove:
         os.remove(f)
@@ -1717,9 +1716,7 @@ def topological_sort(graph, depth_first=False):
     components = nx.connected_components(G)
     for desc in components:
         group += 1
-        indices = []
-        for node in desc:
-            indices.append(nodesort.index(node))
+        indices = [nodesort.index(node) for node in desc]
         nodes.extend(
            np.array(nodesort)[np.array(indices)[np.argsort(indices)]].tolist()
        )
diff --git a/nipype/pipeline/engine/workflows.py b/nipype/pipeline/engine/workflows.py
index 4729206dfb..97bd8cce9e 100644
--- a/nipype/pipeline/engine/workflows.py
+++ b/nipype/pipeline/engine/workflows.py
@@ -301,11 +301,12 @@ def disconnect(self, *args):
             edge_data = self._graph.get_edge_data(srcnode, dstnode, {"connect": []})
             ed_conns = [(c[0], c[1]) for c in edge_data["connect"]]
 
-            remove = []
-            for edge in conn:
-                if edge in ed_conns:
-                    # idx = ed_conns.index(edge)
-                    remove.append((edge[0], edge[1]))
+            remove = [
+                # idx = ed_conns.index(edge)
+                (edge[0], edge[1])
+                for edge in conn
+                if edge in ed_conns
+            ]
 
             logger.debug("disconnect(): remove list %s", str(remove))
             for el in remove:
@@ -571,8 +572,9 @@ def export(
                 )
                 lines.append(connect_template2 % line_args)
         functionlines = ["# Functions"]
-        for function in functions:
-            functionlines.append(pickle.loads(function).rstrip())
+        functionlines.extend(
+            pickle.loads(function).rstrip() for function in functions
+        )
         all_lines = importlines + functionlines + lines
 
         if not filename:
@@ -843,10 +845,11 @@ def _get_inputs(self):
             if isinstance(node, Workflow):
                 setattr(inputdict, node.name, node.inputs)
             else:
-                taken_inputs = []
-                for _, _, d in self._graph.in_edges(nbunch=node, data=True):
-                    for cd in d["connect"]:
-                        taken_inputs.append(cd[1])
+                taken_inputs = [
+                    cd[1]
+                    for _, _, d in self._graph.in_edges(nbunch=node, data=True)
+                    for cd in d["connect"]
+                ]
                 unconnectedinputs = TraitedSpec()
                 for key, trait in list(node.inputs.items()):
                     if key not in taken_inputs:
@@ -1088,8 +1091,10 @@ def _get_dot(
                        subnodefullname = ".".join(hierarchy + [subnode.fullname])
                        nodename = nodefullname.replace(".", "_")
                        subnodename = subnodefullname.replace(".", "_")
-                       for _ in self._graph.get_edge_data(node, subnode)["connect"]:
-                           dotlist.append(f"{nodename} -> {subnodename};")
+                       dotlist.extend(
+                           f"{nodename} -> {subnodename};"
+                           for _ in self._graph.get_edge_data(node, subnode)["connect"]
+                       )
                        logger.debug("connection: %s", dotlist[-1])
        # add between workflow connections
        for u, v, d in self._graph.edges(data=True):
diff --git a/nipype/pipeline/plugins/somaflow.py b/nipype/pipeline/plugins/somaflow.py
index 5333eb6b28..2105204979 100644
--- a/nipype/pipeline/plugins/somaflow.py
+++ b/nipype/pipeline/plugins/somaflow.py
@@ -22,14 +22,18 @@ def __init__(self, plugin_args=None):
         super().__init__(plugin_args=plugin_args)
 
     def _submit_graph(self, pyfiles, dependencies, nodes):
-        jobs = []
-        soma_deps = []
-        for fname in pyfiles:
-            name = os.path.splitext(os.path.split(fname)[1])[0]
-            jobs.append(Job(command=[sys.executable, fname], name=name))
-        for key, values in list(dependencies.items()):
-            for val in values:
-                soma_deps.append((jobs[val], jobs[key]))
+        jobs = [
+            Job(
+                command=[sys.executable, fname],
+                name=os.path.splitext(os.path.split(fname)[1])[0],
+            )
+            for fname in pyfiles
+        ]
+        soma_deps = [
+            (jobs[val], jobs[key])
+            for key, values in dependencies.items()
+            for val in values
+        ]
 
         wf = Workflow(jobs, soma_deps)
         logger.info("serializing workflow")
diff --git a/nipype/utils/filemanip.py b/nipype/utils/filemanip.py
index c6f270e8c7..e97b93c3f9 100644
--- a/nipype/utils/filemanip.py
+++ b/nipype/utils/filemanip.py
@@ -126,10 +126,9 @@ def fname_presuffix(fname, prefix="", suffix="", newpath=None, use_ext=True):
 
 def fnames_presuffix(fnames, prefix="", suffix="", newpath=None, use_ext=True):
     """Calls fname_presuffix for a list of files."""
-    f2 = []
-    for fname in fnames:
-        f2.append(fname_presuffix(fname, prefix, suffix, newpath, use_ext))
-    return f2
+    return [
+        fname_presuffix(fname, prefix, suffix, newpath, use_ext) for fname in fnames
+    ]
 
 
 def hash_rename(filename, hashvalue):
@@ -445,14 +444,15 @@ def get_related_files(filename, include_this_file=True):
     include_this_file : bool
         If true, output includes the input filename.
     """
-    related_files = []
     path, name, this_type = split_filename(filename)
-    for type_set in related_filetype_sets:
-        if this_type in type_set:
-            for related_type in type_set:
-                if include_this_file or related_type != this_type:
-                    related_files.append(op.join(path, name + related_type))
-    if not len(related_files):
+    related_files = [
+        op.join(path, f"{name}{related_type}")
+        for type_set in related_filetype_sets
+        if this_type in type_set
+        for related_type in type_set
+        if include_this_file or related_type != this_type
+    ]
+    if not related_files:
         related_files = [filename]
 
     return related_files
@@ -714,17 +714,11 @@ def write_rst_header(header, level=0):
 
 
 def write_rst_list(items, prefix=""):
-    out = []
-    for item in ensure_list(items):
-        out.append(f"{prefix} {item}")
-    return "\n".join(out) + "\n\n"
+    return "\n".join(f"{prefix} {item}" for item in ensure_list(items)) + "\n\n"
 
 
 def write_rst_dict(info, prefix=""):
-    out = []
-    for key, value in sorted(info.items()):
-        out.append(f"{prefix}* {key} : {value}")
-    return "\n".join(out) + "\n\n"
+    return "\n".join(f"{prefix}* {k} : {v}" for k, v in sorted(info.items())) + "\n\n"
 
 
 def dist_is_editable(dist):
diff --git a/nipype/utils/misc.py b/nipype/utils/misc.py
index baafbf29d2..95f4b408c2 100644
--- a/nipype/utils/misc.py
+++ b/nipype/utils/misc.py
@@ -246,10 +246,7 @@ def unflatten(in_list, prev_structure):
     if not isinstance(prev_structure, list):
         return next(in_list)
 
-    out = []
-    for item in prev_structure:
-        out.append(unflatten(in_list, item))
-    return out
+    return [unflatten(in_list, item) for item in prev_structure]
 
 
 def normalize_mc_params(params, source):
diff --git a/nipype/utils/nipype2boutiques.py b/nipype/utils/nipype2boutiques.py
index df06f9dd39..90785f447e 100644
--- a/nipype/utils/nipype2boutiques.py
+++ b/nipype/utils/nipype2boutiques.py
@@ -473,13 +473,11 @@ def get_boutiques_output(outputs, name, spec, interface, tool_inputs):
             output["list"] = True
             if output_value:
                 # Check if all extensions are the same
-                extensions = []
-                for val in output_value:
-                    extensions.append(os.path.splitext(val)[1])
+                extensions = {os.path.splitext(val)[1] for val in output_value}
                 # If extensions all the same, set path template as
                 # wildcard + extension. Otherwise just use a wildcard
-                if len(set(extensions)) == 1:
-                    output["path-template"] = "*" + extensions[0]
+                if len(extensions) == 1:
+                    output["path-template"] = "*" + extensions.pop()
                 else:
                     output["path-template"] = "*"
         return output
@@ -572,8 +570,9 @@ def generate_custom_inputs(desc_inputs):
         if desc_input["type"] == "Flag":
             custom_input_dicts.append({desc_input["id"]: True})
         elif desc_input.get("value-choices") and not desc_input.get("list"):
-            for value in desc_input["value-choices"]:
-                custom_input_dicts.append({desc_input["id"]: value})
+            custom_input_dicts.extend(
+                {desc_input["id"]: value} for value in desc_input["value-choices"]
+            )
     return custom_input_dicts