Skip to content

Commit

Permalink
fixed calculation error
Browse files Browse the repository at this point in the history
  • Loading branch information
michaelweinold committed Oct 23, 2024
1 parent 99983dd commit 83ece95
Show file tree
Hide file tree
Showing 4 changed files with 57 additions and 36 deletions.
22 changes: 16 additions & 6 deletions app/index.py
Original file line number Diff line number Diff line change
Expand Up @@ -339,10 +339,10 @@ def update_production_based_on_user_data(df: pd.DataFrame) -> pd.DataFrame:
| UID | SupplyAmount | Branch |
|-----|-------------------|---------------|
| 0 | 1 | NaN |
| 1 | 0.25 | [0,1] |
| 1 | 0.25 | [0,1] | NOTA BENE!
| 2 | 0.2 * (0.25/0.5) | [0,1,2] |
| 3 | 0.1 | [0,3] |
| 4 | 0.18 | [0,1,2,4] |
| 4 | 0.18 | [0,1,2,4] | NOTA BENE!
| 5 | 0.05 * (0.1/0.18) | [0,1,2,4,5] |
| 6 | 0.01 * (0.1/0.18) | [0,1,2,4,5,6] |
Expand Down Expand Up @@ -391,10 +391,20 @@ def update_production_based_on_user_data(df: pd.DataFrame) -> pd.DataFrame:
def multiplier(row):
    """
    Return the (possibly user-overridden) supply amount for one row.

    - Rows without a branch list (e.g. the root node) keep their
      original ``SupplyAmount``.
    - Rows that are the terminal node of their own branch return the
      user-supplied amount when one was given, otherwise the original.
    - All other rows are scaled by the override of the nearest
      user-edited ancestor, searching the branch leaf-to-root.
      NOTE(review): assumes ``dict_user_input`` maps UID -> scaling
      factor for edited nodes — confirm against the enclosing function.
    """
    # Root/unbranched rows (Branch is NaN, not a list) carry no
    # ancestry to scale against.
    if not isinstance(row['Branch'], list):
        return row['SupplyAmount']
    # The row is the last node of its own branch: the user-supplied
    # value wins when present, otherwise keep the original amount.
    elif row['UID'] == row['Branch'][-1]:
        if np.isnan(row['SupplyAmount_USER']):
            return row['SupplyAmount']
        return row['SupplyAmount_USER']
    else:
        # Scale by the nearest user-edited ancestor (leaf-to-root).
        for branch_UID in reversed(row['Branch']):
            if branch_UID in dict_user_input:
                return row['SupplyAmount'] * dict_user_input[branch_UID]
        # FIX: the committed code fell through here and returned None
        # (NaN in the result column) when no ancestor was user-edited;
        # keep the original amount instead, as the pre-change code did.
        return row['SupplyAmount']

df['SupplyAmount_EDITED'] = df.apply(multiplier, axis=1)

Expand Down
45 changes: 28 additions & 17 deletions dev/test_edit_graph.py
Original file line number Diff line number Diff line change
Expand Up @@ -81,10 +81,7 @@ def create_user_input_column(
return df_merged


def update_production_based_on_user_data(
df: pd.DataFrame,
column_name: str
) -> pd.DataFrame:
def update_production_based_on_user_data(df: pd.DataFrame) -> pd.DataFrame:
"""
Updates the production amount of all nodes which are upstream
of a node with user-supplied production amount.
Expand Down Expand Up @@ -143,8 +140,8 @@ def update_production_based_on_user_data(
Output DataFrame.
"""

df_filtered = df[~df[f'{column_name}_USER'].isna()]
dict_user_input = df_filtered.set_index('UID').to_dict()[f'{column_name}_USER']
df_filtered = df[~df['SupplyAmount_USER'].isna()]
dict_user_input = df_filtered.set_index('UID').to_dict()['SupplyAmount_USER']

"""
For the example DataFrame from the docstrings above,
Expand All @@ -159,20 +156,34 @@ def update_production_based_on_user_data(
df = df.copy(deep=True)
def multiplier(row):
    """
    Return the (possibly user-overridden) supply amount for one row.

    - Rows without a branch list (e.g. the root node) keep their
      original ``SupplyAmount``.
    - Rows that are the terminal node of their own branch return the
      user-supplied amount when one was given, otherwise the original.
    - All other rows are scaled by the override of the nearest
      user-edited ancestor, searching the branch leaf-to-root.
      NOTE(review): assumes ``dict_user_input`` maps UID -> scaling
      factor for edited nodes — confirm against the enclosing function.
    """
    # Root/unbranched rows (Branch is NaN, not a list) carry no
    # ancestry to scale against.
    if not isinstance(row['Branch'], list):
        return row['SupplyAmount']
    # The row is the last node of its own branch: the user-supplied
    # value wins when present, otherwise keep the original amount.
    elif row['UID'] == row['Branch'][-1]:
        if np.isnan(row['SupplyAmount_USER']):
            return row['SupplyAmount']
        return row['SupplyAmount_USER']
    else:
        # Scale by the nearest user-edited ancestor (leaf-to-root).
        for branch_UID in reversed(row['Branch']):
            if branch_UID in dict_user_input:
                return row['SupplyAmount'] * dict_user_input[branch_UID]
        # FIX: the committed code fell through here and returned None
        # (NaN in the result column) when no ancestor was user-edited;
        # keep the original amount instead, as the pre-change code did.
        return row['SupplyAmount']

df['SupplyAmount_EDITED'] = df.apply(multiplier, axis=1)

df.drop(columns=['SupplyAmount_USER'], inplace=True)
df['SupplyAmount'] = df['SupplyAmount_EDITED']
df.drop(columns=['SupplyAmount_EDITED'], inplace=True)

return df

# Attach the user-edited amounts to the original DataFrame as a
# 'SupplyAmount_USER' column (NaN where the user made no edit).
df_user_col = create_user_input_column(
    df_original=df_original,
    df_user_input=df_user_input,
    column_name='SupplyAmount'
)

# Propagate the user edits through the graph: per its docstring, this
# updates the production amount of all nodes upstream of an edited node.
df_updated = update_production_based_on_user_data(df_user_col)
24 changes: 12 additions & 12 deletions pyodide/index.html

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion pyodide/index.js

Large diffs are not rendered by default.

0 comments on commit 83ece95

Please sign in to comment.