Merge pull request #166 from nismod/weather_refactor
Weather refactor
sveneggimann authored Mar 29, 2019
2 parents 532ae59 + c902cb2 commit 9d3e34f
Showing 14 changed files with 157 additions and 367 deletions.
6 changes: 0 additions & 6 deletions energy_demand/geography/weather_region.py
@@ -20,8 +20,6 @@ class WeatherRegion(object):
----------
name : str
Unique identifyer of weather region
latitude, longitude : float
Coordinates
assumptions : dict
Assumptions
technologies : list
@@ -43,8 +41,6 @@ class WeatherRegion(object):
def __init__(
self,
name,
latitude,
longitude,
assumptions,
technologies,
enduses,
@@ -57,8 +53,6 @@ def __init__(
"""Constructor of weather region
"""
self.name = name
self.longitude = longitude
self.latitude = latitude

fueltypes = lookup_tables.basic_lookups()['fueltypes']

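The hunks above remove the latitude/longitude attributes from WeatherRegion, so a weather region is now identified by its name alone and receives its temperatures directly. A minimal sketch of the new calling pattern, using a stand-in class and invented values rather than the real constructor:

import numpy as np

class WeatherRegionSketch:
    """Stand-in for energy_demand.geography.weather_region.WeatherRegion."""
    def __init__(self, name, temp_by, temp_cy):
        self.name = name        # unique identifier; no latitude/longitude anymore
        self.temp_by = temp_by  # base-year temperatures for this region
        self.temp_cy = temp_cy  # current weather-year temperatures for this region

region = WeatherRegionSketch(
    name="E06000001",  # hypothetical region code
    temp_by={'t_min': np.full(365, 3.0), 't_max': np.full(365, 10.0)},
    temp_cy={'t_min': np.full(365, 4.0), 't_max': np.full(365, 11.0)})
print(region.name)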
22 changes: 3 additions & 19 deletions energy_demand/main.py
@@ -28,7 +28,6 @@ def energy_demand_model(
data,
criterias,
assumptions,
weather_stations,
weather_yr,
weather_by
):
@@ -65,7 +64,6 @@ def energy_demand_model(
data=data,
criterias=criterias,
assumptions=assumptions,
weather_stations=weather_stations,
weather_yr=weather_yr,
weather_by=weather_by)

@@ -165,15 +163,6 @@ def energy_demand_model(
data['regions'] = read_data.get_region_names(name_region_set)
data['reg_coord'] = basic_functions.get_long_lat_decimal_degrees(read_data.get_region_centroids(name_region_set))
data['scenario_data']['population'] = data_loader.read_scenario_data(name_population_dataset, region_name='lad_uk_2016', value_name='population')

# Write out coordinates
statistics_to_print = []
for i, j in data['reg_coord'].items():
statistics_to_print.append("{},{},{}".format(i,j['latitude'], j['longitude']))
# Write info to txt
write_data.write_list_to_txt(
os.path.join("C:/AAA/_test.txt"),
statistics_to_print)

data['scenario_data']['gva_industry'] = data_loader.read_scenario_data_gva(name_gva_dataset, region_name='lad_uk_2016', value_name='gva_per_head', all_dummy_data=False)
data['scenario_data']['gva_per_head'] = data_loader.read_scenario_data(name_gva_dataset_per_head, region_name='lad_uk_2016', value_name='gva_per_head')
@@ -278,16 +267,12 @@ def energy_demand_model(
# Make selection of weather stations and data
# Load all temperature and weather station data
# ---------------------------------------------
data['weather_stations'], data['temp_data'] = data_loader.load_temp_data(
data['local_paths'],
data['temp_data'] = data_loader.load_temp_data(
sim_yrs=sim_yrs,
regions=data['regions'],
weather_realisation=weather_realisation,
path_weather_data=path_weather_data,
same_base_year_weather=False,
crit_temp_min_max=config['CRITERIA']['crit_temp_min_max'],
load_np=False,
load_parquet=False,
load_csv=True)
same_base_year_weather=False)

# ------------------------------------------------------------
# Disaggregate national energy demand to regional demands
@@ -456,7 +441,6 @@ def energy_demand_model(
data,
config['CRITERIA'],
data['assumptions'],
data['weather_stations'],
weather_yr=weather_yr_scenario,
weather_by=data['assumptions'].weather_by)

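With the station handling gone, load_temp_data returns only the nested temperature dictionary that energy_demand_model consumes. A hedged sketch of that structure as implied by the diff (years, region code and values are invented for illustration):

import numpy as np

# temp_data[weather_yr][region] -> {'t_min': np.array(365), 't_max': np.array(365)}
temp_data = {
    2015: {"E06000001": {'t_min': np.full(365, 4.0), 't_max': np.full(365, 12.0)}},
    2050: {"E06000001": {'t_min': np.full(365, 5.0), 't_max': np.full(365, 13.5)}},
}

weather_by, weather_yr, region = 2015, 2050, "E06000001"
t_by = temp_data[weather_by][region]   # base-year weather for the region
t_cy = temp_data[weather_yr][region]   # scenario-year weather for the region
print(t_cy['t_max'].mean() - t_by['t_max'].mean())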
21 changes: 3 additions & 18 deletions energy_demand/model.py
@@ -43,7 +43,6 @@ def tqdm(iterator, *_, **__):
from energy_demand.profiles import load_factors
from energy_demand.profiles import generic_shapes
from energy_demand.basic import demand_supply_interaction
from energy_demand.geography import weather_station_location

class EnergyDemandModel(object):
""" Main function of energy demand model. All submodels
@@ -66,7 +65,6 @@ def __init__(
data,
criterias,
assumptions,
weather_stations,
weather_yr,
weather_by
):
@@ -102,7 +100,6 @@ def __init__(
# -------------------------------------------
# Simulate regions
# -------------------------------------------
#_all_closest_weather_stations = []
for reg_array_nr, region in enumerate(tqdm(regions)):

print("... Simulate: region %s, simulation year: %s, percent: (%s)",
@@ -114,10 +111,8 @@ def __init__(
data,
criterias,
assumptions,
weather_stations,
weather_yr,
weather_by)
#_all_closest_weather_stations.append(all_submodels)#weather_region_id

# ---------------------------------------------
# Aggregate results specifically over regions
@@ -309,7 +304,6 @@ def simulate_region(
data,
criterias,
assumptions,
weather_stations,
weather_yr,
weather_by
):
@@ -329,24 +323,16 @@
"""
submodel_names = assumptions.submodels_names

# Get closest weather region object
weather_region_id = weather_station_location.get_closest_station(
latitude_reg=data['reg_coord'][region]['latitude'],
longitude_reg=data['reg_coord'][region]['longitude'],
weather_stations=weather_stations)

# ----------------------------
# Create Base year and current weather Regions
# ----------------------------
weather_region_cy = WeatherRegion(
name=weather_region_id,
latitude=weather_stations[weather_region_id]['latitude'],
longitude=weather_stations[weather_region_id]['longitude'],
name=region,
assumptions=assumptions,
technologies=assumptions.technologies,
enduses=data['enduses'],
temp_by=data['temp_data'][weather_by][weather_region_id],
temp_cy=data['temp_data'][weather_yr][weather_region_id],
temp_by=data['temp_data'][weather_by][region],
temp_cy=data['temp_data'][weather_yr][region],
tech_lp=data['tech_lp'],
sectors=data['sectors'],
crit_temp_min_max=criterias['crit_temp_min_max'])
@@ -406,7 +392,6 @@ def simulate_region(
dw_stock=dw_stock,
reg_scen_drivers=assumptions.scenario_drivers,
flat_profile_crit=flat_profile_crit)
#return weather_region_id

def fuel_aggr(
sector_models,
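The change above drops the closest-weather-station lookup: simulate_region now indexes temp_data directly by region name. A small self-contained sketch contrasting the removed lookup with the new access (the station records and the distance metric are illustrative, not the project's implementation):

import math

def closest_station(lat, lon, stations):
    """Return the id of the station nearest to (lat, lon) - the old behaviour."""
    def dist(station):
        return math.hypot(station['latitude'] - lat, station['longitude'] - lon)
    return min(stations, key=lambda sid: dist(stations[sid]))

stations = {
    'st_a': {'latitude': 51.5, 'longitude': -0.1},
    'st_b': {'latitude': 53.5, 'longitude': -2.2}}
temp_data = {2015: {'st_a': 'temps_a', 'st_b': 'temps_b', 'E06000001': 'temps_region'}}

# Old: region coordinates -> nearest station id -> temperatures
station_id = closest_station(51.4, -0.2, stations)
old_temps = temp_data[2015][station_id]

# New: temperatures are pre-assigned per region, so the region name is the key
new_temps = temp_data[2015]['E06000001']
print(old_temps, new_temps)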
27 changes: 2 additions & 25 deletions energy_demand/profiles/hdd_cdd.py
@@ -441,8 +441,6 @@ def get_hdd_country(
t_base_heating,
regions,
temp_data,
reg_coord,
weather_stations,
crit_temp_min_max=False
):
"""Calculate total number of heating degree days in a region for the base year
@@ -457,18 +455,9 @@
hdd_regions = {}

for region in regions:

# Get closest weather station and temperatures
closest_station_id = weather_station.get_closest_station(
reg_coord[region]['latitude'],
reg_coord[region]['longitude'],
weather_stations)

temperatures = temp_data[closest_station_id]

hdd_reg = calc_hdd(
t_base_heating,
temperatures,
temp_data[region],
nr_day_to_av=1,
crit_temp_min_max=crit_temp_min_max)

@@ -480,8 +469,6 @@ def get_cdd_country(
t_base_cooling,
regions,
temp_data,
reg_coord,
weather_stations,
crit_temp_min_max=False
):
"""Calculate total number of cooling degree days in a
@@ -503,19 +490,9 @@
cdd_regions = {}

for region in regions:

# Get closest weather station and temperatures
closest_station_id = weather_station.get_closest_station(
reg_coord[region]['latitude'],
reg_coord[region]['longitude'],
weather_stations)

# Temp data
temperatures = temp_data[closest_station_id]

cdd_reg = calc_cdd(
t_base_cooling,
temperatures,
temp_data[region],
nr_day_to_av=1,
crit_temp_min_max=crit_temp_min_max)

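After the refactor, get_hdd_country and get_cdd_country pass temp_data[region] straight into calc_hdd/calc_cdd. A self-contained sketch of a degree-day calculation from daily minimum and maximum temperatures, using a plain (t_min + t_max) / 2 daily mean, which may differ from the project's actual calc_hdd:

import numpy as np

def calc_hdd_sketch(t_base_heating, t_min, t_max):
    # Crude daily mean temperature; heating degree days accumulate whenever
    # the mean falls below the heating base temperature.
    t_mean = (t_min + t_max) / 2.0
    return np.sum(np.maximum(0.0, t_base_heating - t_mean))

temp_data = {
    "E06000001": {'t_min': np.full(365, 3.0), 't_max': np.full(365, 9.0)}}

hdd_regions = {
    region: calc_hdd_sketch(15.5, temps['t_min'], temps['t_max'])
    for region, temps in temp_data.items()}
print(hdd_regions)   # {'E06000001': 3467.5}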
125 changes: 29 additions & 96 deletions energy_demand/read_write/data_loader.py
@@ -882,15 +882,11 @@ def load_weather_stations_csv(path_stations):
return out_stations

def load_temp_data(
local_paths,
sim_yrs,
regions,
weather_realisation,
path_weather_data,
same_base_year_weather=False,
crit_temp_min_max=False,
load_np=False,
load_parquet=False,
load_csv=True
):
"""Read in cleaned temperature and weather station data
@@ -905,111 +901,48 @@
Returns
-------
weather_stations : dict
Weather stations
temp_data : dict
Temperaturesv
crit_temp_min_max : dict
True: Hourly temperature data are provided
False: min and max temperature are provided
t_yrs_stations : dict
Temperatures {sim_yr: {stations: {t_min: np.array(365), t_max: np.array(365)}}}
[year][region]['tmin and tmax']
Info
----
PAarquest file http://pandas.pydata.org/pandas-docs/stable/io.html#io-parquet
Parquet file http://pandas.pydata.org/pandas-docs/stable/io.html#io-parquet
"""
logging.debug("... loading temperatures", flush=True)

temp_data_short = defaultdict(dict)
weather_stations_with_data = defaultdict(dict)

if crit_temp_min_max:

# ------------------
# Read stations
# ------------------
path_stations = os.path.join(path_weather_data, "stations_{}.csv".format(weather_realisation))

weather_stations_with_data = load_weather_stations_csv(path_stations)

# ------------------
# Read temperatures
# ------------------
if load_np:
path_temp_data = os.path.join(path_weather_data, "weather_data_{}.npy".format(weather_realisation))
full_data = np.load(path_temp_data)

# Convert npy to dataframe
df_full_data = pd.DataFrame(
full_data,
columns=['timestep', 'station_id', 'stiching_name', 'yearday', 't_min', 't_max'])
if load_parquet:
path_temp_data = os.path.join(path_weather_data, "weather_data_{}.parquet".format(weather_realisation))
df_full_data = pd.read_parquet(path_temp_data, engine='pyarrow')
if load_csv:
path_temp_data = os.path.join(path_weather_data, "weather_data_{}.csv".format(weather_realisation))
df_full_data = pd.read_csv(path_temp_data)

for sim_yr in sim_yrs:

if same_base_year_weather:
sim_yr = sim_yrs[0]
else:
pass
print(" ... year: {}".format(sim_yr), flush=True)

# Select all station values
df_timestep = df_full_data.loc[df_full_data['timestep'] == sim_yr]

for station_id in weather_stations_with_data:

df_timestep_station = df_timestep.loc[df_timestep['station_id'] == station_id]

# Remove extrated rows to speed up process
df_timestep = df_timestep.drop(list(df_timestep_station.index))

t_min = list(df_timestep_station['t_min'].values)
t_max = list(df_timestep_station['t_max'].values)

temp_data_short[sim_yr][station_id] = {
't_min': np.array(t_min),
't_max': np.array(t_max)}

return dict(weather_stations_with_data), dict(temp_data_short)
else:
# Reading in hourly temperature data
weather_stations = read_weather_stations_raw(
local_paths['folder_path_weater_stations'])
# ------------------
# Read temperatures
# ------------------
path_t_min = os.path.join(path_weather_data, "t_min__{}.csv".format(weather_realisation))
path_t_max = os.path.join(path_weather_data, "t_max__{}.csv".format(weather_realisation))
df_t_min = pd.read_csv(path_t_min)
df_t_max = pd.read_csv(path_t_max)

for sim_yr in sim_yrs:
print(" ... load temperature for year: {}".format(sim_yr), flush=True)
if same_base_year_weather:
weather_data_yr = sim_yrs[0]
else:
weather_data_yr = sim_yr

for weather_yr_scenario in sim_yrs:
temp_data = read_weather_data.read_weather_data_script_data(
local_paths['weather_data'], weather_yr_scenario)
# Select all station values
df_timestep_t_min = df_t_min.loc[df_t_min['timestep'] == weather_data_yr]
df_timestep_t_max = df_t_max.loc[df_t_max['timestep'] == weather_data_yr]

for station in weather_stations:
try:
_ = temp_data[station]
for region in regions:
df_timestep_station_t_min = df_timestep_t_min.loc[df_timestep_t_min['region'] == region]
df_timestep_station_t_max = df_timestep_t_max.loc[df_timestep_t_max['region'] == region]

# Remove all non-uk stations
if weather_stations[station]['longitude'] > 2 or weather_stations[station]['longitude'] < -8.5:
pass
else:
temp_data_short[weather_yr_scenario][station] = temp_data[station]
except:
logging.debug("no data for weather station " + str(station))

for station_id in temp_data_short[weather_yr_scenario].keys():
try:
weather_stations_with_data[station_id] = weather_stations[station_id]
except:
del temp_data_short[weather_yr_scenario][station_id]
t_min = list(df_timestep_station_t_min['value'].values)
t_max = list(df_timestep_station_t_max['value'].values)

logging.info(
"Info: Number of weather stations: {} weather_yr_scenario: Number of temp data: {}, weather_yr_scenario: {}".format(
len(weather_stations_with_data), len(temp_data_short[weather_yr_scenario]), weather_yr_scenario))
temp_data_short[sim_yr][region] = {
't_min': np.array(t_min),
't_max': np.array(t_max)}

return dict(weather_stations_with_data), dict(temp_data_short)
return dict(temp_data_short)

def load_fuels(paths):
"""Load in ECUK fuel data, enduses and sectors
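The rewritten load_temp_data reads long-format t_min__<realisation>.csv and t_max__<realisation>.csv files (timestep, region, value columns) and reshapes them into the per-year, per-region dictionary. A hedged sketch of that reshaping step with a small in-memory frame standing in for the real files (all data invented):

from collections import defaultdict
import pandas as pd

# Stand-ins for pd.read_csv("t_min__<realisation>.csv") and the t_max file.
df_t_min = pd.DataFrame({
    'timestep': [2015] * 3,
    'region': ['E06000001'] * 3,
    'value': [2.0, 3.0, 4.0]})
df_t_max = pd.DataFrame({
    'timestep': [2015] * 3,
    'region': ['E06000001'] * 3,
    'value': [8.0, 9.0, 10.0]})

sim_yrs, regions = [2015], ['E06000001']
temp_data_short = defaultdict(dict)

for sim_yr in sim_yrs:
    # Select the rows of the requested simulation year once per year
    yr_min = df_t_min.loc[df_t_min['timestep'] == sim_yr]
    yr_max = df_t_max.loc[df_t_max['timestep'] == sim_yr]
    for region in regions:
        temp_data_short[sim_yr][region] = {
            't_min': yr_min.loc[yr_min['region'] == region, 'value'].to_numpy(),
            't_max': yr_max.loc[yr_max['region'] == region, 'value'].to_numpy()}

print(dict(temp_data_short))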