diff --git a/nipype/interfaces/base/tests/test_resource_monitor.py b/nipype/interfaces/base/tests/test_resource_monitor.py
index 8ecd5269e9..5e2365bd39 100644
--- a/nipype/interfaces/base/tests/test_resource_monitor.py
+++ b/nipype/interfaces/base/tests/test_resource_monitor.py
@@ -55,7 +55,9 @@ class UseResources(CommandLine):
 
 @pytest.mark.skip(reason="inconsistent readings")
 @pytest.mark.skipif(os.getenv("CI_SKIP_TEST", False), reason="disabled in CI tests")
-@pytest.mark.parametrize(("mem_gb", "n_procs"), [(0.5, 3), (2.2, 8), (0.8, 4), (1.5, 1)])
+@pytest.mark.parametrize(
+    ("mem_gb", "n_procs"), [(0.5, 3), (2.2, 8), (0.8, 4), (1.5, 1)]
+)
 def test_cmdline_profiling(tmpdir, mem_gb, n_procs, use_resource_monitor):
     """
     Test runtime profiler correctly records workflow RAM/CPUs consumption
@@ -80,7 +82,9 @@ def test_cmdline_profiling(tmpdir, mem_gb, n_procs, use_resource_monitor):
 @pytest.mark.skipif(
     True, reason="test disabled temporarily, until function profiling works"
 )
-@pytest.mark.parametrize(("mem_gb", "n_procs"), [(0.5, 3), (2.2, 8), (0.8, 4), (1.5, 1)])
+@pytest.mark.parametrize(
+    ("mem_gb", "n_procs"), [(0.5, 3), (2.2, 8), (0.8, 4), (1.5, 1)]
+)
 def test_function_profiling(tmpdir, mem_gb, n_procs, use_resource_monitor):
     """
     Test runtime profiler correctly records workflow RAM/CPUs consumption
diff --git a/nipype/interfaces/spm/utils.py b/nipype/interfaces/spm/utils.py
index 76944893e1..bc6a06edc6 100644
--- a/nipype/interfaces/spm/utils.py
+++ b/nipype/interfaces/spm/utils.py
@@ -105,22 +105,17 @@ def _make_matlab_command(self, _):
             self.inputs.mat = self._make_mat_file()
         if not isdefined(self.inputs.invmat):
             self.inputs.invmat = self._make_inv_file()
-        script = """
-        target = '{}';
-        moving = '{}';
+        script = f"""
+        target = '{self.inputs.target}';
+        moving = '{self.inputs.moving}';
         targetv = spm_vol(target);
         movingv = spm_vol(moving);
         x = spm_coreg(targetv, movingv);
         M = spm_matrix(x);
-        save('{}' , 'M' );
+        save('{self.inputs.mat}' , 'M' );
         M = inv(M);
-        save('{}','M')
-        """.format(
-            self.inputs.target,
-            self.inputs.moving,
-            self.inputs.mat,
-            self.inputs.invmat,
-        )
+        save('{self.inputs.invmat}','M')
+        """
         return script
 
     def _list_outputs(self):
@@ -166,10 +161,10 @@ def _make_matlab_command(self, _):
         """checks for SPM, generates script"""
         outputs = self._list_outputs()
         self.inputs.out_file = outputs["out_file"]
-        script = """
-        infile = '{}';
-        outfile = '{}'
-        transform = load('{}');
+        script = f"""
+        infile = '{self.inputs.in_file}';
+        outfile = '{self.inputs.out_file}'
+        transform = load('{self.inputs.mat}');
 
         V = spm_vol(infile);
         X = spm_read_vols(V);
@@ -178,11 +173,7 @@
         V.fname = fullfile(outfile);
         spm_write_vol(V,X);
 
-        """.format(
-            self.inputs.in_file,
-            self.inputs.out_file,
-            self.inputs.mat,
-        )
+        """
         # img_space = spm_get_space(infile);
         # spm_get_space(infile, transform.M * img_space);
         return script
diff --git a/nipype/interfaces/workbench/metric.py b/nipype/interfaces/workbench/metric.py
index 07e068e901..50e4300cd5 100644
--- a/nipype/interfaces/workbench/metric.py
+++ b/nipype/interfaces/workbench/metric.py
@@ -149,8 +149,7 @@ def _format_arg(self, opt, spec, val):
         if opt in ["current_area", "new_area"]:
             if not self.inputs.area_surfs and not self.inputs.area_metrics:
                 raise ValueError(
-                    "{} was set but neither area_surfs or"
-                    " area_metrics were set".format(opt)
+                    f"{opt} was set but neither area_surfs or area_metrics were set"
                 )
         if opt == "method":
             if (
diff --git a/nipype/pipeline/engine/utils.py b/nipype/pipeline/engine/utils.py
index fa785c6bae..ae7c057b5c 100644
--- a/nipype/pipeline/engine/utils.py
+++ b/nipype/pipeline/engine/utils.py
@@ -369,13 +369,7 @@ def format_node(node, format="python", include_config=False):
         args = ", ".join(filled_args)
         klass_name = klass.__class__.__name__
         if isinstance(node, MapNode):
-            nodedef = '{} = MapNode({}({}), iterfield={}, name="{}")'.format(
-                name,
-                klass_name,
-                args,
-                node.iterfield,
-                name,
-            )
+            nodedef = f'{name} = MapNode({klass_name}({args}), iterfield={node.iterfield}, name="{name}")'
         else:
             nodedef = f'{name} = Node({klass_name}({args}), name="{name}")'
         lines = [importline, comment, nodedef]
@@ -782,9 +776,7 @@ def _merge_graphs(
         rootnode = list(Gc.nodes())[nodeidx]
         paramstr = ""
         for key, val in sorted(params.items()):
-            paramstr = "{}_{}_{}".format(
-                paramstr, _get_valid_pathstr(key), _get_valid_pathstr(val)
-            )
+            paramstr = f"{paramstr}_{_get_valid_pathstr(key)}_{_get_valid_pathstr(val)}"
             rootnode.set_input(key, val)
 
     logger.debug("Parameterization: paramstr=%s", paramstr)
@@ -916,10 +908,8 @@ def _propagate_internal_output(graph, node, field, connections, portinputs):
                 src_func = src_port[1].split("\\n")[0]
                 dst_func = src[1].split("\\n")[0]
                 raise ValueError(
-                    "Does not support two inline functions "
-                    "in series ('{}' and '{}'), found when "
-                    "connecting {} to {}. Please use a Function "
-                    "node.".format(src_func, dst_func, srcnode, destnode)
+                    f"Does not support two inline functions in series ('{src_func}' and '{dst_func}'), "
+                    f"found when connecting {srcnode} to {destnode}. Please use a Function node."
                 )
 
             connect = graph.get_edge_data(srcnode, destnode, default={"connect": []})
diff --git a/nipype/pipeline/engine/workflows.py b/nipype/pipeline/engine/workflows.py
index 93758b0a13..7bc4faef34 100644
--- a/nipype/pipeline/engine/workflows.py
+++ b/nipype/pipeline/engine/workflows.py
@@ -444,8 +444,7 @@ def write_graph(
         if graph2use in ["hierarchical", "colored"]:
             if self.name[:1].isdigit():  # these graphs break if int
                 raise ValueError(
-                    "{} graph failed, workflow name cannot begin "
-                    "with a number".format(graph2use)
+                    f"{graph2use} graph failed, workflow name cannot begin with a number"
                 )
             dotfilename = op.join(base_dir, dotfilename)
             self.write_hierarchical_dotfile(
diff --git a/nipype/pipeline/plugins/base.py b/nipype/pipeline/plugins/base.py
index 1571ab71a9..f84dccb039 100644
--- a/nipype/pipeline/plugins/base.py
+++ b/nipype/pipeline/plugins/base.py
@@ -534,11 +534,8 @@ def _get_result(self, taskid):
             results_file = None
             try:
                 error_message = (
-                    "Job id ({}) finished or terminated, but "
-                    "results file does not exist after ({}) "
-                    "seconds. Batch dir contains crashdump file "
-                    "if node raised an exception.\n"
-                    "Node working directory: ({}) ".format(taskid, timeout, node_dir)
+                    f"Job id ({taskid}) finished or terminated, but results file does not exist after ({timeout}) seconds. Batch dir contains crashdump file if node raised an exception.\n"
+                    f"Node working directory: ({node_dir}) "
                 )
                 raise OSError(error_message)
             except OSError:
diff --git a/nipype/pipeline/plugins/lsf.py b/nipype/pipeline/plugins/lsf.py
index cf334be051..fea0d4267d 100644
--- a/nipype/pipeline/plugins/lsf.py
+++ b/nipype/pipeline/plugins/lsf.py
@@ -85,11 +85,7 @@ def _submit_batchtask(self, scriptfile, node):
         jobnameitems = jobname.split(".")
         jobnameitems.reverse()
         jobname = ".".join(jobnameitems)
-        cmd.inputs.args = "{} -J {} sh {}".format(
-            bsubargs,
-            jobname,
-            scriptfile,
-        )  # -J job_name_spec
+        cmd.inputs.args = f"{bsubargs} -J {jobname} sh {scriptfile}"  # -J job_name_spec
         logger.debug("bsub " + cmd.inputs.args)
         oldlevel = iflogger.level
         iflogger.setLevel(logging.getLevelName("CRITICAL"))
diff --git a/nipype/pipeline/plugins/sge.py b/nipype/pipeline/plugins/sge.py
index 38079e947d..384f408d46 100644
--- a/nipype/pipeline/plugins/sge.py
+++ b/nipype/pipeline/plugins/sge.py
@@ -82,9 +82,7 @@ def is_job_state_pending(self):
         time_diff = time.time() - self._job_info_creation_time
         if self.is_zombie():
             sge_debug_print(
-                "DONE! QJobInfo.IsPending found in 'zombie' list, returning False so claiming done!\n{}".format(
-                    self
-                )
+                f"DONE! QJobInfo.IsPending found in 'zombie' list, returning False so claiming done!\n{self}"
             )
             is_pending_status = False  # Job explicitly found as being completed!
         elif self.is_initializing() and (time_diff > 600):
@@ -253,10 +251,7 @@ def _parse_qstat_job_list(self, xml_job_list):
                     self._task_dictionary[dictionary_job].set_state("zombie")
                 else:
                     sge_debug_print(
-                        "ERROR: Job not in current parselist, "
-                        "and not in done list {}: {}".format(
-                            dictionary_job, self._task_dictionary[dictionary_job]
-                        )
+                        f"ERROR: Job not in current parselist, and not in done list {dictionary_job}: {self._task_dictionary[dictionary_job]}"
                     )
                 if self._task_dictionary[dictionary_job].is_initializing():
                     is_completed = self._qacct_verified_complete(dictionary_job)
@@ -264,10 +259,7 @@
                         self._task_dictionary[dictionary_job].set_state("zombie")
                     else:
                         sge_debug_print(
-                            "ERROR: Job not in still in initialization mode, "
-                            "and not in done list {}: {}".format(
-                                dictionary_job, self._task_dictionary[dictionary_job]
-                            )
+                            f"ERROR: Job not in still in initialization mode, and not in done list {dictionary_job}: {self._task_dictionary[dictionary_job]}"
                         )
 
     def _run_qstat(self, reason_for_qstat, force_instant=True):
@@ -279,8 +271,7 @@ def _run_qstat(self, reason_for_qstat, force_instant=True):
        -s s suspended jobs
        """
        sge_debug_print(
-            "WARNING: CONTACTING qmaster for jobs, "
-            "{}: {}".format(time.time(), reason_for_qstat)
+            f"WARNING: CONTACTING qmaster for jobs, {time.time()}: {reason_for_qstat}"
        )
        if force_instant:
            this_command = self._qstat_instant_executable
@@ -340,8 +331,7 @@ def is_job_pending(self, task_id):
            job_is_pending = self._task_dictionary[task_id].is_job_state_pending()
        else:
            sge_debug_print(
-                "ERROR: Job {} not in task list, "
-                "even after forced qstat!".format(task_id)
+                f"ERROR: Job {task_id} not in task list, even after forced qstat!"
            )
            job_is_pending = False
        if not job_is_pending:
@@ -352,8 +342,7 @@
                self._task_dictionary.pop(task_id)
            else:
                sge_debug_print(
-                    "ERROR: Job {} not in task list, "
-                    "but attempted to be removed!".format(task_id)
+                    f"ERROR: Job {task_id} not in task list, but attempted to be removed!"
                )
 
        return job_is_pending
diff --git a/nipype/pipeline/plugins/sgegraph.py b/nipype/pipeline/plugins/sgegraph.py
index 3d9e8ac40d..17aa514f85 100644
--- a/nipype/pipeline/plugins/sgegraph.py
+++ b/nipype/pipeline/plugins/sgegraph.py
@@ -144,14 +144,7 @@ def make_job_name(jobnumber, nodeslist):
                 stdoutFile = ""
                 if self._qsub_args.count("-o ") == 0:
                     stdoutFile = f"-o {batchscriptoutfile}"
-                full_line = "{jobNm}=$(qsub {outFileOption} {errFileOption} {extraQSubArgs} {dependantIndex} -N {jobNm} {batchscript} | awk '/^Your job/{{print $3}}')\n".format(
-                    jobNm=jobname,
-                    outFileOption=stdoutFile,
-                    errFileOption=stderrFile,
-                    extraQSubArgs=qsub_args,
-                    dependantIndex=deps,
-                    batchscript=batchscriptfile,
-                )
+                full_line = f"{jobname}=$(qsub {stdoutFile} {stderrFile} {qsub_args} {deps} -N {jobname} {batchscriptfile} | awk '/^Your job/{{print $3}}')\n"
                 fp.writelines(full_line)
         cmd = CommandLine(
             "bash",
diff --git a/nipype/pipeline/plugins/slurmgraph.py b/nipype/pipeline/plugins/slurmgraph.py
index 0999595f5d..5ed5701acb 100644
--- a/nipype/pipeline/plugins/slurmgraph.py
+++ b/nipype/pipeline/plugins/slurmgraph.py
@@ -144,14 +144,7 @@ def make_job_name(jobnumber, nodeslist):
                 stdoutFile = ""
                 if self._sbatch_args.count("-o ") == 0:
                     stdoutFile = f"-o {batchscriptoutfile}"
-                full_line = "{jobNm}=$(sbatch {outFileOption} {errFileOption} {extraSBatchArgs} {dependantIndex} -J {jobNm} {batchscript} | awk '/^Submitted/ {{print $4}}')\n".format(
-                    jobNm=jobname,
-                    outFileOption=stdoutFile,
-                    errFileOption=stderrFile,
-                    extraSBatchArgs=sbatch_args,
-                    dependantIndex=deps,
-                    batchscript=batchscriptfile,
-                )
+                full_line = f"{jobname}=$(sbatch {stdoutFile} {stderrFile} {sbatch_args} {deps} -J {jobname} {batchscriptfile} | awk '/^Submitted/ {{print $4}}')\n"
                 fp.writelines(full_line)
         cmd = CommandLine(
             "bash",
diff --git a/nipype/pipeline/plugins/tools.py b/nipype/pipeline/plugins/tools.py
index bce3eb82da..ae0082d2ed 100644
--- a/nipype/pipeline/plugins/tools.py
+++ b/nipype/pipeline/plugins/tools.py
@@ -32,12 +32,10 @@ def report_crash(node, traceback=None, hostname=None):
                 keepends=True
             )
         except Exception as exc:
-            traceback += """
+            traceback += f"""
 
 During the creation of this crashfile triggered by the above exception,
-another exception occurred:\n\n{}.""".format(
-                exc
-            ).splitlines(
+another exception occurred:\n\n{exc}.""".splitlines(
                 keepends=True
             )
     else:
diff --git a/nipype/scripts/utils.py b/nipype/scripts/utils.py
index 77e7231bea..0de305bead 100644
--- a/nipype/scripts/utils.py
+++ b/nipype/scripts/utils.py
@@ -100,10 +100,7 @@ def add_args_options(arg_parser, interface):
 
             if has_multiple_inner_traits:
                 raise NotImplementedError(
-                    "This interface cannot be used. via the"
-                    " command line as multiple inner traits"
-                    " are currently not supported for mandatory"
-                    " argument: {}.".format(name)
+                    f"This interface cannot be used via the command line, as multiple inner traits are currently not supported for mandatory argument: {name}."
                 )
             arg_parser.add_argument(name, help=desc, **args)
         else:
diff --git a/nipype/sphinxext/apidoc/docstring.py b/nipype/sphinxext/apidoc/docstring.py
index cbecc0a5de..33fd0a848f 100644
--- a/nipype/sphinxext/apidoc/docstring.py
+++ b/nipype/sphinxext/apidoc/docstring.py
@@ -138,15 +138,11 @@ def _parse_spec(inputs, name, spec):
     pos = spec.position
     if pos is None:
         desc_lines += [
-            """Maps to a command-line argument: :code:`{arg}`.""".format(
-                arg=argstr.strip()
-            )
+            f"""Maps to a command-line argument: :code:`{argstr.strip()}`."""
         ]
     else:
         desc_lines += [
-            """Maps to a command-line argument: :code:`{arg}` (position: {pos}).""".format(
-                arg=argstr.strip(), pos=pos
-            )
+            f"""Maps to a command-line argument: :code:`{argstr.strip()}` (position: {pos})."""
         ]
 
     xor = spec.xor
diff --git a/nipype/utils/filemanip.py b/nipype/utils/filemanip.py
index 568ba28ae2..2fd41d77a1 100644
--- a/nipype/utils/filemanip.py
+++ b/nipype/utils/filemanip.py
@@ -588,11 +588,7 @@ def loadpkl(infile):
         fmlogger.debug(f"'{infile}' missing; waiting 2s")
         sleep(2)
     if timed_out:
-        error_message = (
-            "Result file {} expected, but "
-            "does not exist after ({}) "
-            "seconds.".format(infile, timeout)
-        )
+        error_message = f"Result file {infile} expected, but does not exist after {timeout} seconds."
         raise OSError(error_message)
 
     with pklopen(str(infile), "rb") as pkl_file:
diff --git a/nipype/utils/tests/test_filemanip.py b/nipype/utils/tests/test_filemanip.py
index 11eb7b3d07..895fef2d1e 100644
--- a/nipype/utils/tests/test_filemanip.py
+++ b/nipype/utils/tests/test_filemanip.py
@@ -207,8 +207,8 @@ def test_recopy(_temp_analyze_files):
         img_stat = _ignore_atime(os.stat(new_img))
         hdr_stat = _ignore_atime(os.stat(new_hdr))
         copyfile(orig_img, new_img, **kwargs)
-        err_msg = "Regular - OS: {}; Copy: {}; Hardlink: {}".format(
-            os.name, copy, use_hardlink
+        err_msg = (
+            f"Regular - OS: {os.name}; Copy: {copy}; Hardlink: {use_hardlink}"
         )
         assert img_stat == _ignore_atime(os.stat(new_img)), err_msg
         assert hdr_stat == _ignore_atime(os.stat(new_hdr)), err_msg
@@ -219,8 +219,8 @@ def test_recopy(_temp_analyze_files):
         img_stat = _ignore_atime(os.stat(new_img))
         hdr_stat = _ignore_atime(os.stat(new_hdr))
         copyfile(img_link, new_img, **kwargs)
-        err_msg = "Symlink - OS: {}; Copy: {}; Hardlink: {}".format(
-            os.name, copy, use_hardlink
+        err_msg = (
+            f"Symlink - OS: {os.name}; Copy: {copy}; Hardlink: {use_hardlink}"
         )
         assert img_stat == _ignore_atime(os.stat(new_img)), err_msg
         assert hdr_stat == _ignore_atime(os.stat(new_hdr)), err_msg
diff --git a/tools/checkspecs.py b/tools/checkspecs.py
index a3a3d1e88c..11dcb32671 100644
--- a/tools/checkspecs.py
+++ b/tools/checkspecs.py
@@ -257,10 +257,7 @@ def test_specs(self, uri):
                 for key, value in sorted(trait.__dict__.items()):
                     if key in in_built or key == "desc":
                         continue
-                    input_fields += "{}={},\n    ".format(
-                        key,
-                        self._normalize_repr(value),
-                    )
+                    input_fields += f"{key}={self._normalize_repr(value)},\n    "
                 input_fields += "),\n    "
                 cmd += ["    input_map = dict(%s)" % input_fields]
                 cmd += ["    inputs = %s.input_spec()" % c]
@@ -348,10 +345,7 @@ def test_specs(self, uri):
                 for key, value in sorted(trait.__dict__.items()):
                     if key in in_built or key == "desc":
                         continue
-                    input_fields += "{}={},\n    ".format(
-                        key,
-                        self._normalize_repr(value),
-                    )
+                    input_fields += f"{key}={self._normalize_repr(value)},\n    "
                 input_fields += "),\n    "
                 cmd += ["    output_map = dict(%s)" % input_fields]
                 cmd += ["    outputs = %s.output_spec()" % c]