STY: Apply ruff/pyupgrade rule UP032
UP032 Use f-string instead of `format` call
DimitriPapadopoulos committed Oct 7, 2024
1 parent 43358d7 commit b2c4a28
Showing 16 changed files with 46 additions and 114 deletions.
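For context, UP032 is the pyupgrade-derived rule that flags `str.format` calls whose arguments can be inlined into the literal and rewrites them as f-strings. A minimal sketch of the rewrite, with hypothetical names rather than code from this commit:

-greeting = "Hello, {}! You have {} new messages.".format(user, count)
+greeting = f"Hello, {user}! You have {count} new messages."

A change of this kind can typically be regenerated tree-wide with `ruff check --select UP032 --fix`; calls that cannot be rewritten safely (for example, `"{}".format(*args)` with starred arguments) are left as-is.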
8 changes: 6 additions & 2 deletions nipype/interfaces/base/tests/test_resource_monitor.py
@@ -55,7 +55,9 @@ class UseResources(CommandLine):

 @pytest.mark.skip(reason="inconsistent readings")
 @pytest.mark.skipif(os.getenv("CI_SKIP_TEST", False), reason="disabled in CI tests")
-@pytest.mark.parametrize(("mem_gb", "n_procs"), [(0.5, 3), (2.2, 8), (0.8, 4), (1.5, 1)])
+@pytest.mark.parametrize(
+    ("mem_gb", "n_procs"), [(0.5, 3), (2.2, 8), (0.8, 4), (1.5, 1)]
+)
 def test_cmdline_profiling(tmpdir, mem_gb, n_procs, use_resource_monitor):
     """
     Test runtime profiler correctly records workflow RAM/CPUs consumption
@@ -80,7 +82,9 @@ def test_cmdline_profiling(tmpdir, mem_gb, n_procs, use_resource_monitor):
 @pytest.mark.skipif(
     True, reason="test disabled temporarily, until function profiling works"
 )
-@pytest.mark.parametrize(("mem_gb", "n_procs"), [(0.5, 3), (2.2, 8), (0.8, 4), (1.5, 1)])
+@pytest.mark.parametrize(
+    ("mem_gb", "n_procs"), [(0.5, 3), (2.2, 8), (0.8, 4), (1.5, 1)]
+)
 def test_function_profiling(tmpdir, mem_gb, n_procs, use_resource_monitor):
     """
     Test runtime profiler correctly records workflow RAM/CPUs consumption
31 changes: 11 additions & 20 deletions nipype/interfaces/spm/utils.py
@@ -105,22 +105,17 @@ def _make_matlab_command(self, _):
             self.inputs.mat = self._make_mat_file()
         if not isdefined(self.inputs.invmat):
             self.inputs.invmat = self._make_inv_file()
-        script = """
-        target = '{}';
-        moving = '{}';
+        script = f"""
+        target = '{self.inputs.target}';
+        moving = '{self.inputs.moving}';
         targetv = spm_vol(target);
         movingv = spm_vol(moving);
         x = spm_coreg(targetv, movingv);
         M = spm_matrix(x);
-        save('{}' , 'M' );
+        save('{self.inputs.mat}' , 'M' );
         M = inv(M);
-        save('{}','M')
-        """.format(
-            self.inputs.target,
-            self.inputs.moving,
-            self.inputs.mat,
-            self.inputs.invmat,
-        )
+        save('{self.inputs.invmat}','M')
+        """
         return script

     def _list_outputs(self):
@@ -166,10 +161,10 @@ def _make_matlab_command(self, _):
"""checks for SPM, generates script"""
outputs = self._list_outputs()
self.inputs.out_file = outputs["out_file"]
script = """
infile = '{}';
outfile = '{}'
transform = load('{}');
script = f"""
infile = '{self.inputs.in_file}';
outfile = '{self.inputs.out_file}'
transform = load('{self.inputs.mat}');
V = spm_vol(infile);
X = spm_read_vols(V);
@@ -178,11 +173,7 @@
         V.fname = fullfile(outfile);
         spm_write_vol(V,X);
-        """.format(
-            self.inputs.in_file,
-            self.inputs.out_file,
-            self.inputs.mat,
-        )
+        """
         # img_space = spm_get_space(infile);
         # spm_get_space(infile, transform.M * img_space);
         return script
3 changes: 1 addition & 2 deletions nipype/interfaces/workbench/metric.py
@@ -149,8 +149,7 @@ def _format_arg(self, opt, spec, val):
         if opt in ["current_area", "new_area"]:
             if not self.inputs.area_surfs and not self.inputs.area_metrics:
                 raise ValueError(
-                    "{} was set but neither area_surfs or"
-                    " area_metrics were set".format(opt)
+                    f"{opt} was set but neither area_surfs or area_metrics were set"
                 )
         if opt == "method":
             if (
18 changes: 4 additions & 14 deletions nipype/pipeline/engine/utils.py
@@ -369,13 +369,7 @@ def format_node(node, format="python", include_config=False):
     args = ", ".join(filled_args)
     klass_name = klass.__class__.__name__
     if isinstance(node, MapNode):
-        nodedef = '{} = MapNode({}({}), iterfield={}, name="{}")'.format(
-            name,
-            klass_name,
-            args,
-            node.iterfield,
-            name,
-        )
+        nodedef = f'{name} = MapNode({klass_name}({args}), iterfield={node.iterfield}, name="{name}")'
     else:
         nodedef = f'{name} = Node({klass_name}({args}), name="{name}")'
     lines = [importline, comment, nodedef]
@@ -782,9 +776,7 @@ def _merge_graphs(
             rootnode = list(Gc.nodes())[nodeidx]
             paramstr = ""
             for key, val in sorted(params.items()):
-                paramstr = "{}_{}_{}".format(
-                    paramstr, _get_valid_pathstr(key), _get_valid_pathstr(val)
-                )
+                paramstr = f"{paramstr}_{_get_valid_pathstr(key)}_{_get_valid_pathstr(val)}"
                 rootnode.set_input(key, val)

             logger.debug("Parameterization: paramstr=%s", paramstr)
@@ -916,10 +908,8 @@ def _propagate_internal_output(graph, node, field, connections, portinputs):
                 src_func = src_port[1].split("\\n")[0]
                 dst_func = src[1].split("\\n")[0]
                 raise ValueError(
-                    "Does not support two inline functions "
-                    "in series ('{}' and '{}'), found when "
-                    "connecting {} to {}. Please use a Function "
-                    "node.".format(src_func, dst_func, srcnode, destnode)
+                    f"Does not support two inline functions in series ('{src_func}' and '{dst_func}'), "
+                    f"found when connecting {srcnode} to {destnode}. Please use a Function node."
                 )

             connect = graph.get_edge_data(srcnode, destnode, default={"connect": []})
3 changes: 1 addition & 2 deletions nipype/pipeline/engine/workflows.py
@@ -444,8 +444,7 @@ def write_graph(
         if graph2use in ["hierarchical", "colored"]:
             if self.name[:1].isdigit():  # these graphs break if int
                 raise ValueError(
-                    "{} graph failed, workflow name cannot begin "
-                    "with a number".format(graph2use)
+                    f"{graph2use} graph failed, workflow name cannot begin with a number"
                 )
             dotfilename = op.join(base_dir, dotfilename)
             self.write_hierarchical_dotfile(
7 changes: 2 additions & 5 deletions nipype/pipeline/plugins/base.py
@@ -534,11 +534,8 @@ def _get_result(self, taskid):
             results_file = None
             try:
                 error_message = (
-                    "Job id ({}) finished or terminated, but "
-                    "results file does not exist after ({}) "
-                    "seconds. Batch dir contains crashdump file "
-                    "if node raised an exception.\n"
-                    "Node working directory: ({}) ".format(taskid, timeout, node_dir)
+                    f"Job id ({taskid}) finished or terminated, but results file does not exist after ({timeout}) seconds. Batch dir contains crashdump file if node raised an exception.\n"
+                    f"Node working directory: ({node_dir}) "
                 )
                 raise OSError(error_message)
             except OSError:
6 changes: 1 addition & 5 deletions nipype/pipeline/plugins/lsf.py
@@ -85,11 +85,7 @@ def _submit_batchtask(self, scriptfile, node):
         jobnameitems = jobname.split(".")
         jobnameitems.reverse()
         jobname = ".".join(jobnameitems)
-        cmd.inputs.args = "{} -J {} sh {}".format(
-            bsubargs,
-            jobname,
-            scriptfile,
-        )  # -J job_name_spec
+        cmd.inputs.args = f"{bsubargs} -J {jobname} sh {scriptfile}"  # -J job_name_spec
         logger.debug("bsub " + cmd.inputs.args)
         oldlevel = iflogger.level
         iflogger.setLevel(logging.getLevelName("CRITICAL"))
23 changes: 6 additions & 17 deletions nipype/pipeline/plugins/sge.py
@@ -82,9 +82,7 @@ def is_job_state_pending(self):
         time_diff = time.time() - self._job_info_creation_time
         if self.is_zombie():
             sge_debug_print(
-                "DONE! QJobInfo.IsPending found in 'zombie' list, returning False so claiming done!\n{}".format(
-                    self
-                )
+                f"DONE! QJobInfo.IsPending found in 'zombie' list, returning False so claiming done!\n{self}"
             )
             is_pending_status = False  # Job explicitly found as being completed!
         elif self.is_initializing() and (time_diff > 600):
@@ -253,21 +251,15 @@ def _parse_qstat_job_list(self, xml_job_list):
                     self._task_dictionary[dictionary_job].set_state("zombie")
                 else:
                     sge_debug_print(
-                        "ERROR: Job not in current parselist, "
-                        "and not in done list {}: {}".format(
-                            dictionary_job, self._task_dictionary[dictionary_job]
-                        )
+                        f"ERROR: Job not in current parselist, and not in done list {dictionary_job}: {self._task_dictionary[dictionary_job]}"
                     )
             if self._task_dictionary[dictionary_job].is_initializing():
                 is_completed = self._qacct_verified_complete(dictionary_job)
                 if is_completed:
                     self._task_dictionary[dictionary_job].set_state("zombie")
                 else:
                     sge_debug_print(
-                        "ERROR: Job not in still in initialization mode, "
-                        "and not in done list {}: {}".format(
-                            dictionary_job, self._task_dictionary[dictionary_job]
-                        )
+                        f"ERROR: Job not in still in initialization mode, and not in done list {dictionary_job}: {self._task_dictionary[dictionary_job]}"
                     )

     def _run_qstat(self, reason_for_qstat, force_instant=True):
@@ -279,8 +271,7 @@
         -s s suspended jobs
         """
         sge_debug_print(
-            "WARNING: CONTACTING qmaster for jobs, "
-            "{}: {}".format(time.time(), reason_for_qstat)
+            f"WARNING: CONTACTING qmaster for jobs, {time.time()}: {reason_for_qstat}"
         )
         if force_instant:
             this_command = self._qstat_instant_executable
@@ -340,8 +331,7 @@ def is_job_pending(self, task_id):
             job_is_pending = self._task_dictionary[task_id].is_job_state_pending()
         else:
             sge_debug_print(
-                "ERROR: Job {} not in task list, "
-                "even after forced qstat!".format(task_id)
+                f"ERROR: Job {task_id} not in task list, even after forced qstat!"
             )
             job_is_pending = False
         if not job_is_pending:
@@ -352,8 +342,7 @@
                 self._task_dictionary.pop(task_id)
             else:
                 sge_debug_print(
-                    "ERROR: Job {} not in task list, "
-                    "but attempted to be removed!".format(task_id)
+                    f"ERROR: Job {task_id} not in task list, but attempted to be removed!"
                 )
         return job_is_pending

9 changes: 1 addition & 8 deletions nipype/pipeline/plugins/sgegraph.py
@@ -144,14 +144,7 @@ def make_job_name(jobnumber, nodeslist):
                     stdoutFile = ""
                     if self._qsub_args.count("-o ") == 0:
                         stdoutFile = f"-o {batchscriptoutfile}"
-                    full_line = "{jobNm}=$(qsub {outFileOption} {errFileOption} {extraQSubArgs} {dependantIndex} -N {jobNm} {batchscript} | awk '/^Your job/{{print $3}}')\n".format(
-                        jobNm=jobname,
-                        outFileOption=stdoutFile,
-                        errFileOption=stderrFile,
-                        extraQSubArgs=qsub_args,
-                        dependantIndex=deps,
-                        batchscript=batchscriptfile,
-                    )
+                    full_line = f"{jobname}=$(qsub {stdoutFile} {stderrFile} {qsub_args} {deps} -N {jobname} {batchscriptfile} | awk '/^Your job/{{print $3}}')\n"
                     fp.writelines(full_line)
                 cmd = CommandLine(
                     "bash",
9 changes: 1 addition & 8 deletions nipype/pipeline/plugins/slurmgraph.py
@@ -144,14 +144,7 @@ def make_job_name(jobnumber, nodeslist):
                     stdoutFile = ""
                     if self._sbatch_args.count("-o ") == 0:
                         stdoutFile = f"-o {batchscriptoutfile}"
-                    full_line = "{jobNm}=$(sbatch {outFileOption} {errFileOption} {extraSBatchArgs} {dependantIndex} -J {jobNm} {batchscript} | awk '/^Submitted/ {{print $4}}')\n".format(
-                        jobNm=jobname,
-                        outFileOption=stdoutFile,
-                        errFileOption=stderrFile,
-                        extraSBatchArgs=sbatch_args,
-                        dependantIndex=deps,
-                        batchscript=batchscriptfile,
-                    )
+                    full_line = f"{jobname}=$(sbatch {stdoutFile} {stderrFile} {sbatch_args} {deps} -J {jobname} {batchscriptfile} | awk '/^Submitted/ {{print $4}}')\n"
                     fp.writelines(full_line)
                 cmd = CommandLine(
                     "bash",
6 changes: 2 additions & 4 deletions nipype/pipeline/plugins/tools.py
@@ -32,12 +32,10 @@ def report_crash(node, traceback=None, hostname=None):
                 keepends=True
             )
         except Exception as exc:
-            traceback += """
+            traceback += f"""

 During the creation of this crashfile triggered by the above exception,
-another exception occurred:\n\n{}.""".format(
-                exc
-            ).splitlines(
+another exception occurred:\n\n{exc}.""".splitlines(
                 keepends=True
             )
     else:
5 changes: 1 addition & 4 deletions nipype/scripts/utils.py
@@ -100,10 +100,7 @@ def add_args_options(arg_parser, interface):

             if has_multiple_inner_traits:
                 raise NotImplementedError(
-                    "This interface cannot be used. via the"
-                    " command line as multiple inner traits"
-                    " are currently not supported for mandatory"
-                    " argument: {}.".format(name)
+                    f"This interface cannot be used via the command line, as multiple inner traits are currently not supported for mandatory argument: {name}."
                 )
             arg_parser.add_argument(name, help=desc, **args)
         else:
8 changes: 2 additions & 6 deletions nipype/sphinxext/apidoc/docstring.py
@@ -138,15 +138,11 @@ def _parse_spec(inputs, name, spec):
         pos = spec.position
         if pos is None:
             desc_lines += [
-                """Maps to a command-line argument: :code:`{arg}`.""".format(
-                    arg=argstr.strip()
-                )
+                f"""Maps to a command-line argument: :code:`{argstr.strip()}`."""
             ]
         else:
             desc_lines += [
-                """Maps to a command-line argument: :code:`{arg}` (position: {pos}).""".format(
-                    arg=argstr.strip(), pos=pos
-                )
+                f"""Maps to a command-line argument: :code:`{argstr.strip()}` (position: {pos})."""
             ]

     xor = spec.xor
6 changes: 1 addition & 5 deletions nipype/utils/filemanip.py
@@ -588,11 +588,7 @@ def loadpkl(infile):
         fmlogger.debug(f"'{infile}' missing; waiting 2s")
         sleep(2)
     if timed_out:
-        error_message = (
-            "Result file {} expected, but "
-            "does not exist after ({}) "
-            "seconds.".format(infile, timeout)
-        )
+        error_message = f"Result file {infile} expected, but does not exist after {timeout} seconds."
         raise OSError(error_message)

     with pklopen(str(infile), "rb") as pkl_file:
8 changes: 4 additions & 4 deletions nipype/utils/tests/test_filemanip.py
@@ -207,8 +207,8 @@ def test_recopy(_temp_analyze_files):
             img_stat = _ignore_atime(os.stat(new_img))
             hdr_stat = _ignore_atime(os.stat(new_hdr))
             copyfile(orig_img, new_img, **kwargs)
-            err_msg = "Regular - OS: {}; Copy: {}; Hardlink: {}".format(
-                os.name, copy, use_hardlink
+            err_msg = (
+                f"Regular - OS: {os.name}; Copy: {copy}; Hardlink: {use_hardlink}"
             )
             assert img_stat == _ignore_atime(os.stat(new_img)), err_msg
             assert hdr_stat == _ignore_atime(os.stat(new_hdr)), err_msg
@@ -219,8 +219,8 @@ def test_recopy(_temp_analyze_files):
             img_stat = _ignore_atime(os.stat(new_img))
             hdr_stat = _ignore_atime(os.stat(new_hdr))
             copyfile(img_link, new_img, **kwargs)
-            err_msg = "Symlink - OS: {}; Copy: {}; Hardlink: {}".format(
-                os.name, copy, use_hardlink
+            err_msg = (
+                f"Symlink - OS: {os.name}; Copy: {copy}; Hardlink: {use_hardlink}"
             )
             assert img_stat == _ignore_atime(os.stat(new_img)), err_msg
             assert hdr_stat == _ignore_atime(os.stat(new_hdr)), err_msg
10 changes: 2 additions & 8 deletions tools/checkspecs.py
@@ -257,10 +257,7 @@ def test_specs(self, uri):
             for key, value in sorted(trait.__dict__.items()):
                 if key in in_built or key == "desc":
                     continue
-                input_fields += "{}={},\n    ".format(
-                    key,
-                    self._normalize_repr(value),
-                )
+                input_fields += f"{key}={self._normalize_repr(value)},\n    "
             input_fields += "),\n    "
             cmd += ["    input_map = dict(%s)" % input_fields]
             cmd += ["    inputs = %s.input_spec()" % c]
@@ -348,10 +345,7 @@ def test_specs(self, uri):
             for key, value in sorted(trait.__dict__.items()):
                 if key in in_built or key == "desc":
                     continue
-                input_fields += "{}={},\n    ".format(
-                    key,
-                    self._normalize_repr(value),
-                )
+                input_fields += f"{key}={self._normalize_repr(value)},\n    "
             input_fields += "),\n    "
             cmd += ["    output_map = dict(%s)" % input_fields]
             cmd += ["    outputs = %s.output_spec()" % c]
