Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Use context managers to open files #50

Merged
merged 1 commit into from
Jul 24, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 2 additions & 3 deletions antarctica_today/compute_mean_climatology.py
Original file line number Diff line number Diff line change
Expand Up @@ -247,9 +247,8 @@ def read_daily_sum_melt_averages_picklefile(
)

logger.debug(f"Reading {daily_sum_picklefile}")
f = open(daily_sum_picklefile, "rb")
array, dt_dict = pickle.load(f)
f.close()
with open(daily_sum_picklefile, "rb") as f:
array, dt_dict = pickle.load(f)

return array, dt_dict

Expand Down
11 changes: 5 additions & 6 deletions antarctica_today/generate_antarctica_today_map.py
Original file line number Diff line number Diff line change
Expand Up @@ -548,9 +548,8 @@ def _generate_new_baseline_map_figure(

if save_to_picklefile:
fname = map_picklefile_dictionary[(map_type_lower, region_number)]
f = open(fname, "wb")
pickle.dump(fig, f)
f.close()
with open(fname, "wb") as f:
pickle.dump(fig, f)
logger.debug(f"Wrote {fname}")

return fig, ax
Expand All @@ -571,9 +570,9 @@ def _read_baseline_map_picklefile(self, map_type="daily", region_number=0):
logger.debug(f"Reading {fname}")

# Read the picklefile
f = open(fname, "rb")
fig = pickle.load(f)
f.close()
with open(fname, "rb") as f:
fig = pickle.load(f)

# Get the axes, should just be one panel here.
ax = fig.axes[0]
# Set the current axes to ax
Expand Down
5 changes: 2 additions & 3 deletions antarctica_today/update_data.py
Original file line number Diff line number Diff line change
Expand Up @@ -310,9 +310,8 @@ def update_everything_to_latest_date(
dt_dict[dt] = previous_melt_array.shape[2] + i

if overwrite:
f = open(tb_file_data.model_results_picklefile, "wb")
pickle.dump((melt_array_updated, dt_dict), f)
f.close()
with open(tb_file_data.model_results_picklefile, "wb") as f:
pickle.dump((melt_array_updated, dt_dict), f)

logger.info(f"Wrote {tb_file_data.model_results_picklefile}")

Expand Down
51 changes: 25 additions & 26 deletions antarctica_today/write_flat_binary.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,35 +36,34 @@ def write_array_to_binary(
byteorder = "big"

# Open the output file name.
f = open(bin_filename, "wb")

# Convert the number of bytes into the correct numpy array datatype.
if signed:
n_dtype = {1: numpy.int8, 2: numpy.int16, 4: numpy.int32, 8: numpy.int64}[
int(numbytes)
]
else:
n_dtype = {1: numpy.uint8, 2: numpy.uint16, 4: numpy.uint32, 8: numpy.uint64}[
int(numbytes)
]

# Convert the array into the appropriate data type, and multiply by the multiplier
out_array = numpy.array(array * multiplier, dtype=n_dtype)

# Flatten the array.
out_array = out_array.flatten()

for value in out_array:
f.write(
int.to_bytes(
int(value), length=numbytes, byteorder=byteorder, signed=signed
with open(bin_filename, "wb") as f:
# Convert the number of bytes into the correct numpy array datatype.
if signed:
n_dtype = {1: numpy.int8, 2: numpy.int16, 4: numpy.int32, 8: numpy.int64}[
int(numbytes)
]
else:
n_dtype = {
1: numpy.uint8,
2: numpy.uint16,
4: numpy.uint32,
8: numpy.uint64,
}[int(numbytes)]

        # Convert the array into the appropriate data type, and multiply by the multiplier
out_array = numpy.array(array * multiplier, dtype=n_dtype)

# Flatten the array.
out_array = out_array.flatten()

for value in out_array:
f.write(
int.to_bytes(
int(value), length=numbytes, byteorder=byteorder, signed=signed
)
)
)

f.close()

logger.debug(f"Wrote {os.path.split(bin_filename)[-1]}")

return bin_filename


Expand Down
Loading