Skip to content

Commit

Permalink
Merge pull request #14 from NREL/bnb/py_upgrade
Browse files Browse the repository at this point in the history
remove py 3.7 add 3.10 and 3.11
  • Loading branch information
bnb32 authored Jul 5, 2024
2 parents 710bd75 + 43cd619 commit 6976a55
Show file tree
Hide file tree
Showing 6 changed files with 174 additions and 138 deletions.
16 changes: 0 additions & 16 deletions .github/release-drafter.yml

This file was deleted.

8 changes: 5 additions & 3 deletions .github/workflows/pull_request_tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -8,12 +8,14 @@ jobs:
strategy:
matrix:
os: [ubuntu-latest, macos-latest, windows-latest]
python-version: [3.9]
python-version: ['3.11']
include:
- os: ubuntu-latest
python-version: 3.8
python-version: '3.10'
- os: ubuntu-latest
python-version: 3.7
python-version: '3.9'
- os: ubuntu-latest
python-version: '3.8'

steps:
- uses: actions/checkout@v2
Expand Down
179 changes: 115 additions & 64 deletions farms/utilities.py
Original file line number Diff line number Diff line change
@@ -1,25 +1,46 @@
"""Common utilities FARMS module.
"""
"""Common utilities FARMS module."""

from copy import deepcopy
import pandas as pd
import numpy as np
import os
from copy import deepcopy
from warnings import warn
from farms import RADIUS, CLEAR_TYPES, CLOUD_TYPES, SZA_LIM

import numpy as np
import pandas as pd
import pytest

from farms import CLEAR_TYPES, CLOUD_TYPES, RADIUS, SZA_LIM


def execute_pytest(file, capture="all", flags="-rapP"):
    """Execute module as pytest with detailed summary report.

    Parameters
    ----------
    file : str
        Pytest file to execute.
    capture : str
        Log or stdout/stderr capture option. ex: log (only logger),
        all (includes stdout/stderr).
    flags : str
        Which tests to show logs and results for.
    """
    # run only the file's own basename so pytest resolves it relative
    # to the invocation directory
    fname = os.path.basename(file)
    pytest.main(["-q", f"--show-capture={capture}", fname, flags])


def check_range(data, name, rang=(0, 1)):
    """Ensure that data values are in the correct range.

    Parameters
    ----------
    data : np.ndarray
        Transmittance/reflectance data to validate. NaN values are
        ignored via nanmin/nanmax.
    name : str
        Variable name, used only in the error message.
    rang : tuple
        (min, max) inclusive bounds for valid values.

    Raises
    ------
    ValueError
        If any non-NaN value in data falls outside of rang.
    """
    # NOTE(review): the diff residue had the pre-format raise duplicated
    # here; keep the single current implementation.
    if np.nanmin(data) < rang[0] or np.nanmax(data) > rang[1]:
        raise ValueError(
            'Variable "{n}" is out of expected '
            "transmittance/reflectance range. Recommend checking "
            "solar zenith angle to ensure cos(sza) is "
            "non-negative and non-zero. "
            "Max/min of {n} = {mx}/{mn}".format(
                n=name, mx=np.nanmax(data), mn=np.nanmin(data)
            )
        )


def ti_to_radius_csv(time_index, n_cols=1):
Expand Down Expand Up @@ -70,8 +91,8 @@ def ti_to_radius(time_index, n_cols=1):
"""
# load earth periodic table
path = os.path.dirname(os.path.realpath(__file__))
df = pd.read_csv(os.path.join(path, 'earth_periodic_terms.csv'))
df['key'] = 1
df = pd.read_csv(os.path.join(path, "earth_periodic_terms.csv"))
df["key"] = 1
# 3.1.1 (4). Julian Date.
j = time_index.to_julian_date().values
# 3.1.2 (5). Julian Ephemeris Date
Expand All @@ -80,18 +101,27 @@ def ti_to_radius(time_index, n_cols=1):
j = (j - 2451545) / 36525
# 3.1.4 (8). Julian Ephemeris Millennium
j = j / 10
df_jme = pd.DataFrame({'uid': range(len(j)), 'jme': j, 'key': 1})
df_jme = pd.DataFrame({"uid": range(len(j)), "jme": j, "key": 1})
# Merge JME with Periodic Table
df_merge = pd.merge(df_jme, df, on='key')
df_merge = pd.merge(df_jme, df, on="key")
# 3.2.1 (9). Heliocentric radius vector.
df_merge['r'] = df_merge['a'] * np.cos(df_merge['b'] + df_merge['c']
* df_merge['jme'])
df_merge["r"] = df_merge["a"] * np.cos(
df_merge["b"] + df_merge["c"] * df_merge["jme"]
)
# 3.2.2 (10).
dfs = df_merge.groupby(by=['uid', 'term'])['r'].sum().unstack()
dfs = df_merge.groupby(by=["uid", "term"])["r"].sum().unstack()
# 3.2.4 (11). Earth Heliocentric radius vector
radius = ((dfs['R0'] + dfs['R1'] * j + dfs['R2'] * np.power(j, 2)
+ dfs['R3'] * np.power(j, 3) + dfs['R4'] * np.power(j, 4)
+ dfs['R5'] * np.power(j, 5)) / np.power(10, 8)).values
radius = (
(
dfs["R0"]
+ dfs["R1"] * j
+ dfs["R2"] * np.power(j, 2)
+ dfs["R3"] * np.power(j, 3)
+ dfs["R4"] * np.power(j, 4)
+ dfs["R5"] * np.power(j, 5)
)
/ np.power(10, 8)
).values
radius = radius.reshape((len(time_index), 1))
radius = np.tile(radius, n_cols)

Expand All @@ -116,15 +146,21 @@ def calc_beta(aod, alpha):
with the mandatory interval [0, 2.2].
"""
if aod.shape != alpha.shape:
raise ValueError('To calculate beta, aod and alpha inputs must be of '
'the same shape. Received arrays of shape {} and {}'
.format(aod.shape, alpha.shape))
raise ValueError(
"To calculate beta, aod and alpha inputs must be of "
"the same shape. Received arrays of shape {} and {}".format(
aod.shape, alpha.shape
)
)

beta = aod * np.power(0.55, alpha)
if np.max(beta) > 2.2 or np.min(beta) < 0:
warn('Calculation of beta resulted in values outside of '
'expected range [0, 2.2]. Min/max of beta are: {}/{}'
.format(np.min(beta), np.max(beta)))
warn(
"Calculation of beta resulted in values outside of "
"expected range [0, 2.2]. Min/max of beta are: {}/{}".format(
np.min(beta), np.max(beta)
)
)

return beta

Expand Down Expand Up @@ -213,11 +249,12 @@ def merge_rest_farms(clearsky_irrad, cloudy_irrad, cloud_type):
FARMS and REST.
"""
# disable nan warnings
np.seterr(divide='ignore', invalid='ignore')
np.seterr(divide="ignore", invalid="ignore")

# combine clearsky and farms according to the cloud types.
all_sky_irrad = np.where(np.isin(cloud_type, CLEAR_TYPES),
clearsky_irrad, cloudy_irrad)
all_sky_irrad = np.where(
np.isin(cloud_type, CLEAR_TYPES), clearsky_irrad, cloudy_irrad
)

return all_sky_irrad

Expand Down Expand Up @@ -256,7 +293,7 @@ def screen_sza(sza, lim=SZA_LIM):
Upper limit of SZA in degrees.
Returns
----------
-------
sza : np.ndarray
Solar zenith angle in degrees with max value = lim.
"""
Expand Down Expand Up @@ -288,9 +325,16 @@ def dark_night(irrad_data, sza, lim=SZA_LIM):
return irrad_data


def cloud_variability(irrad, cs_irrad, cloud_type, var_frac=0.05,
distribution='uniform', option='tri', tri_center=0.9,
random_seed=123):
def cloud_variability(
irrad,
cs_irrad,
cloud_type,
var_frac=0.05,
distribution="uniform",
option="tri",
tri_center=0.9,
random_seed=123,
):
"""Add synthetic variability to irradiance when it's cloudy.
Parameters
Expand Down Expand Up @@ -321,7 +365,7 @@ def cloud_variability(irrad, cs_irrad, cloud_type, var_frac=0.05,
to cloudy timesteps.
"""
# disable divide by zero warnings
np.seterr(divide='ignore', invalid='ignore')
np.seterr(divide="ignore", invalid="ignore")

if var_frac:
# set a seed for pseudo-random but repeatable results
Expand All @@ -332,25 +376,27 @@ def cloud_variability(irrad, cs_irrad, cloud_type, var_frac=0.05,
# Set the cloud/clear ratio to zero when it's nighttime
csr[(cs_irrad == 0)] = 0

if distribution == 'uniform':
variability_scalar = uniform_variability(csr, cloud_type, var_frac,
option=option,
tri_center=tri_center)
elif distribution == 'normal':
variability_scalar = normal_variability(csr, cloud_type, var_frac,
option=option,
tri_center=tri_center)
if distribution == "uniform":
variability_scalar = uniform_variability(
csr, cloud_type, var_frac, option=option, tri_center=tri_center
)
elif distribution == "normal":
variability_scalar = normal_variability(
csr, cloud_type, var_frac, option=option, tri_center=tri_center
)
else:
raise ValueError('Did not recognize distribution: {}'
.format(distribution))
raise ValueError(
"Did not recognize distribution: {}".format(distribution)
)

irrad *= variability_scalar

return irrad


def uniform_variability(csr, cloud_type, var_frac, option='tri',
tri_center=0.9):
def uniform_variability(
csr, cloud_type, var_frac, option="tri", tri_center=0.9
):
"""Get an array with uniform variability scalars centered at 1 that can be
multiplied by a irradiance array with the same shape as csr.
Expand All @@ -376,13 +422,14 @@ def uniform_variability(csr, cloud_type, var_frac, option='tri',
1 with range (1 - var_frac) to (1 + var_frac). This array can be
multiplied by an irradiance array with the same shape as csr
"""
if option == 'linear':
if option == "linear":
var_frac_arr = linear_variability(csr, var_frac)
elif option == 'tri':
elif option == "tri":
var_frac_arr = tri_variability(csr, var_frac, tri_center=tri_center)
else:
raise ValueError('Did not recognize variability option: {}'
.format(option))
raise ValueError(
"Did not recognize variability option: {}".format(option)
)

# get a uniform random scalar array 0 to 1 with data shape
rand_arr = np.random.rand(csr.shape[0], csr.shape[1])
Expand All @@ -391,14 +438,16 @@ def uniform_variability(csr, cloud_type, var_frac, option='tri',
variability_scalar = 1 + var_frac_arr * (rand_arr * 2 - 1)

# only apply rand to the applicable cloudy timesteps
variability_scalar = np.where(np.isin(cloud_type, CLOUD_TYPES),
variability_scalar, 1)
variability_scalar = np.where(
np.isin(cloud_type, CLOUD_TYPES), variability_scalar, 1
)

return variability_scalar


def normal_variability(csr, cloud_type, var_frac, option='tri',
tri_center=0.9):
def normal_variability(
csr, cloud_type, var_frac, option="tri", tri_center=0.9
):
"""Get an array with a normal distribution of variability scalars centered
at 1 that can be multiplied by a irradiance array with the same shape as
csr.
Expand Down Expand Up @@ -426,13 +475,14 @@ def normal_variability(csr, cloud_type, var_frac, option='tri',
centered at 1 with range (1 - var_frac) to (1 + var_frac). This array
can be multiplied by an irradiance array with the same shape as csr
"""
if option == 'linear':
if option == "linear":
var_frac_arr = linear_variability(csr, var_frac)
elif option == 'tri':
elif option == "tri":
var_frac_arr = tri_variability(csr, var_frac, tri_center=tri_center)
else:
raise ValueError('Did not recognize variability option: {}'
.format(option))
raise ValueError(
"Did not recognize variability option: {}".format(option)
)

# get a normal distribution of data centered at 0 with stdev 1
rand_arr = np.random.normal(loc=0.0, scale=1.0, size=csr.shape)
Expand All @@ -441,8 +491,9 @@ def normal_variability(csr, cloud_type, var_frac, option='tri',
variability_scalar = 1 + var_frac_arr * rand_arr

# only apply rand to the applicable cloudy timesteps
variability_scalar = np.where(np.isin(cloud_type, CLOUD_TYPES),
variability_scalar, 1)
variability_scalar = np.where(
np.isin(cloud_type, CLOUD_TYPES), variability_scalar, 1
)

return variability_scalar

Expand Down
22 changes: 12 additions & 10 deletions setup.py
Original file line number Diff line number Diff line change
@@ -1,14 +1,16 @@
"""
setup.py
"""

import os
import shlex
from codecs import open
from setuptools import setup, find_packages
from setuptools.command.develop import develop
from subprocess import check_call
import shlex
from warnings import warn

from setuptools import find_packages, setup
from setuptools.command.develop import develop


class PostDevelopCommand(develop):
"""
Expand All @@ -22,8 +24,7 @@ def run(self):
try:
check_call(shlex.split("pre-commit install"))
except Exception as e:
warn("Unable to run 'pre-commit install': {}"
.format(e))
warn("Unable to run 'pre-commit install': {}".format(e))

develop.run(self)

Expand All @@ -39,9 +40,9 @@ def run(self):
with open(os.path.join(here, "farms", "version.py"), encoding="utf-8") as f:
version = f.read()

version = version.split('=')[-1].strip().strip('"').strip("'")
version = version.split("=")[-1].strip().strip('"').strip("'")

test_requires = ["pytest>=5.2", ]
test_requires = ["pytest>=5.2"]
description = "The Fast All-sky Radiation Model for Solar applications (FARMS)"

setup(
Expand All @@ -54,13 +55,14 @@ def run(self):
url="https://github.com/NREL/farms",
packages=find_packages(),
package_dir={"farms": "farms"},
package_data={'farms': ['earth_periodic_terms.csv',
'sun_earth_radius_vector.csv']},
package_data={
"farms": ["earth_periodic_terms.csv", "sun_earth_radius_vector.csv"]
},
include_package_data=True,
license="BSD 3-Clause",
zip_safe=False,
keywords="farms",
python_requires='>=3.7',
python_requires=">=3.8",
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Science/Research",
Expand Down
Loading

0 comments on commit 6976a55

Please sign in to comment.