Skip to content

Commit

Permalink
working closest results version with metric support for cavity only
Browse files Browse the repository at this point in the history
  • Loading branch information
shanto268 committed Dec 21, 2023
1 parent c6b9f99 commit 283f356
Show file tree
Hide file tree
Showing 7 changed files with 1,606 additions and 785 deletions.
3 changes: 2 additions & 1 deletion squadds/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,4 +9,5 @@
__library_path__ = os.path.join(__repo_path__, "library")


from squadds.core.db import SQuADDS_DB
from squadds.core.db import SQuADDS_DB
from squadds.core.analysis import Analyzer
314 changes: 13 additions & 301 deletions squadds/core/analysis.py

Large diffs are not rendered by default.

100 changes: 29 additions & 71 deletions squadds/core/db.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,76 +6,7 @@
import pandas as pd
from addict import Dict
import numpy as np

# Function to convert numpy arrays to lists within an object
def convert_numpy(obj):
    """Recursively replace NumPy arrays (also nested in dicts/lists) with plain lists."""
    if isinstance(obj, np.ndarray):
        return obj.tolist()
    if isinstance(obj, dict):
        return {name: convert_numpy(obj[name]) for name in obj}
    if isinstance(obj, list):
        return list(map(convert_numpy, obj))
    return obj

# Function to create a unified design_options dictionary
def create_unified_design_options(row):
    """Assemble one device-level design_options dict from a merged qubit+cavity row."""
    cavity_opts = convert_numpy(row["design_options_cavity_claw"])
    return {
        "cavity_claw_options": {
            "coupling_type": row["coupler_type"],
            "coupler_options": cavity_opts.get("cplr_opts", {}),
            "cpw_options": {
                "left_options": cavity_opts.get("cpw_opts", {}),
            },
        },
        "qubit_options": convert_numpy(row["design_options_qubit"]),
    }


def flatten_df_second_level(df):
    """Expand dict-valued columns into one column per second-level key.

    Dict-ness and the key set are decided from the first row only; rows
    missing a key get None. Non-dict columns pass through unchanged.
    """
    collected = {}
    for name in df.columns:
        sample = df[name].iloc[0]
        if not isinstance(sample, dict):
            collected[name] = df[name]
            continue
        for key in sample.keys():
            collected[f"{key}"] = df[name].apply(
                lambda entry, k=key: entry[k] if k in entry else None
            )
    return pd.DataFrame(collected)

def filter_df_by_conditions(df, conditions):
    """Keep rows where every (column, value) pair in *conditions* matches.

    Returns None when conditions is not a dict; returns the original df
    when nothing matches.
    """
    if not isinstance(conditions, dict):
        print("Conditions must be provided as a dictionary.")
        return None

    result = df
    for col, wanted in conditions.items():
        if col in result.columns:
            result = result[result[col] == wanted]

    if not result.empty:
        return result
    print("Warning: No rows match the given conditions. Returning the original DataFrame.")
    return df
from squadds.core.utils import *

class SQuADDS_DB(metaclass=SingletonMeta):

Expand All @@ -91,6 +22,8 @@ def __init__(self):
self.selected_coupler = None
self.selected_system = None
self.selected_df = None
self.target_param_keys = None
self.units = None

def supported_components(self):
components = []
Expand Down Expand Up @@ -370,6 +303,7 @@ def get_dataset(self, data_type=None, component=None, component_name=None):
config = f"{component}-{component_name}-{data_type}"
try:
df = load_dataset(self.repo_name, config)["train"].to_pandas()
self._set_target_param_keys(df)
return flatten_df_second_level(df)
except Exception as e:
print(f"An error occurred while loading the dataset: {e}")
Expand All @@ -390,7 +324,7 @@ def selected_system_df(self):
qubit_df = self.get_dataset(data_type="cap_matrix", component="qubit", component_name=self.selected_qubit) #TODO: handle dynamically
cavity_df = self.get_dataset(data_type="eigenmode", component="cavity_claw", component_name=self.selected_cavity) #TODO: handle dynamically
df = self.create_qubit_cavity_df(qubit_df, cavity_df, merger_terms=['claw_width', 'claw_length', 'claw_gap']) #TODO: handle with user awareness
self.selected_system_df = df
self.selected_df = df
else:
raise UserWarning("Selected system is either not specified or does not contain a cavity! Please check `self.selected_system`")
return df
Expand Down Expand Up @@ -434,6 +368,30 @@ def show_selections(self):
print("Selected system: ", self.selected_system)
print("Selected coupler: ", self.selected_coupler)

def _set_target_param_keys(self, df):
    """Accumulate the simulation-result keys of *df* into self.target_param_keys.

    Args:
        df (pandas.DataFrame): dataset whose `sim_results` keys are harvested
            via `get_sim_results_keys`.

    Raises:
        UserWarning: if no system is selected, or if `self.target_param_keys`
            is neither None nor a list.
    """
    # A system must be selected before target parameters can be gathered
    # (message previously pointed at `self.selected_df`, but the check is on
    # `self.selected_system`).
    if self.selected_system is None:
        raise UserWarning("No system selected. Please check `self.selected_system`")

    if self.target_param_keys is None:
        # first dataset seen: take its keys as-is
        self.target_param_keys = get_sim_results_keys(df)
    elif isinstance(self.target_param_keys, list) and len(self.selected_system) == 2:
        # two-component system: accumulate keys from both datasets
        self.target_param_keys += get_sim_results_keys(df)
    elif isinstance(self.target_param_keys, list) and len(self.selected_system) == 1:
        # single-component system: replace keys with this dataset's.
        # BUGFIX: the condition was `!= 1`, contradicting the intent and
        # sending a valid one-element system into the error branch below.
        self.target_param_keys = get_sim_results_keys(df)
    else:
        raise UserWarning("target_param_keys is not None or a list. Please check `self.target_param_keys`")

    # drop unit bookkeeping entries (keys starting with "unit")
    self.target_param_keys = [key for key in self.target_param_keys if not key.startswith("unit")]

def _get_units(self, df):
    """Extract the measurement units recorded in *df*'s simulation results.

    Args:
        df (pandas.DataFrame): dataset whose unit metadata should be read.

    Raises:
        NotImplementedError: always — placeholder awaiting implementation.
    """
    # TODO: needs implementation
    raise NotImplementedError()

def unselect(self, param):
if param == "component":
self.selected_component = None
Expand Down
3 changes: 1 addition & 2 deletions squadds/core/metrics.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,6 @@
import logging
logging.basicConfig(level=logging.INFO)


class MetricStrategy(ABC):
"""Abstract class for metric strategies."""

Expand Down Expand Up @@ -109,7 +108,7 @@ def calculate(self, target_params: dict, row: pd.Series) -> float:
if isinstance(target_value, (int, float)):
simulated_value = row.get(param, 0)
weight = self.weights.get(param, 1)
distance += weight * ((target_value - simulated_value) ** 2) / target_value
distance += weight * ((target_value - simulated_value) ** 2) / target_value**2
return distance

class CustomMetric(MetricStrategy):
Expand Down
4 changes: 0 additions & 4 deletions squadds/core/selector.py

This file was deleted.

135 changes: 135 additions & 0 deletions squadds/core/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,141 @@
import os
from huggingface_hub import HfApi, HfFolder
from squadds.core.globals import ENV_FILE_PATH
import pandas as pd
import numpy as np

def get_sim_results_keys(dataframes):
    """Collect the unique keys appearing in the 'sim_results' column(s).

    Args:
        dataframes (pandas.DataFrame or list[pandas.DataFrame]): one frame or
            a list of frames; frames without a 'sim_results' column are skipped.

    Returns:
        list: unique keys in first-seen order. (Deterministic — the previous
        `list(set(...))` ordering varied across runs under str hash
        randomization.)
    """
    # Ensure the input is a list, even if it's a single dataframe
    if not isinstance(dataframes, list):
        dataframes = [dataframes]

    all_keys = []
    for df in dataframes:
        if 'sim_results' in df.columns:
            # Extract keys from each row's 'sim_results' dict
            for row in df['sim_results']:
                if isinstance(row, dict):  # skip non-dict rows defensively
                    all_keys.extend(row.keys())

    # dict.fromkeys de-duplicates while preserving first-seen order
    return list(dict.fromkeys(all_keys))

def convert_numpy(obj):
    """
    Recursively convert NumPy arrays nested inside dicts/lists to Python lists.

    Args:
        obj: Any object, possibly containing numpy.ndarray values.

    Returns:
        The same structure with every ndarray replaced by a plain list.
    """
    if isinstance(obj, dict):
        return {key: convert_numpy(value) for key, value in obj.items()}
    if isinstance(obj, list):
        return [convert_numpy(element) for element in obj]
    if isinstance(obj, np.ndarray):
        return obj.tolist()
    return obj

# Function to create a unified design_options dictionary
def create_unified_design_options(row):
    """
    Build the combined device design-options dict for a merged qubit-cavity row.

    Args:
        row (pandas.Series): row holding "design_options_cavity_claw",
            "coupler_type", and "design_options_qubit".

    Returns:
        dict: device options with "cavity_claw_options" and "qubit_options".
    """
    cavity = convert_numpy(row["design_options_cavity_claw"])
    qubit = convert_numpy(row["design_options_qubit"])

    cavity_claw_options = {
        "coupling_type": row["coupler_type"],
        "coupler_options": cavity.get("cplr_opts", {}),
        "cpw_options": {"left_options": cavity.get("cpw_opts", {})},
    }
    return {"cavity_claw_options": cavity_claw_options, "qubit_options": qubit}


def flatten_df_second_level(df):
    """
    Flatten a DataFrame by expanding dict-valued columns into per-key columns.

    Only the first row is inspected to decide whether a column holds dicts and
    which keys to expand; rows missing a key get None. Non-dict columns are
    carried over unchanged.

    Args:
        df (pandas.DataFrame): The DataFrame to be flattened.

    Returns:
        pandas.DataFrame: A new DataFrame with the flattened data.
    """
    # Guard: `.iloc[0]` below raises IndexError on an empty frame
    if df.empty:
        return df.copy()

    flattened_data = {}
    for column in df.columns:
        first = df[column].iloc[0]
        if isinstance(first, dict):
            # Promote each second-level key to its own column.
            # NOTE: keys shared by several dict columns overwrite each other.
            for key in first.keys():
                flattened_data[f"{key}"] = df[column].apply(
                    lambda x, k=key: x[k] if k in x else None
                )
        else:
            # Non-dictionary data is kept as-is
            flattened_data[column] = df[column]

    return pd.DataFrame(flattened_data)

def filter_df_by_conditions(df, conditions):
    """
    Filter a DataFrame by equality conditions (column == value for each pair).

    Conditions naming columns absent from *df* are skipped with a warning —
    previously they were silently ignored, which hid typos in column names
    and returned unfiltered data.

    Args:
        df (pandas.DataFrame): The DataFrame to be filtered.
        conditions (dict): A dictionary containing column-value pairs as conditions.

    Returns:
        pandas.DataFrame or None: the matching rows; the original *df* if no
        row matches; None if *conditions* is not a dict.
    """
    # Ensure conditions is a dictionary
    if not isinstance(conditions, dict):
        print("Conditions must be provided as a dictionary.")
        return None

    filtered_df = df
    for column, value in conditions.items():
        if column in filtered_df.columns:
            filtered_df = filtered_df[filtered_df[column] == value]
        else:
            # Surface typos instead of silently returning unfiltered rows
            print(f"Warning: column '{column}' not found in DataFrame; condition skipped.")

    if filtered_df.empty:
        print("Warning: No rows match the given conditions. Returning the original DataFrame.")
        return df
    return filtered_df

def set_huggingface_api_key():
"""
Expand Down
Loading

0 comments on commit 283f356

Please sign in to comment.