Skip to content

Commit

Permalink
add documentation
Browse files Browse the repository at this point in the history
  • Loading branch information
AlexanderVNikitin committed Nov 20, 2023
1 parent 4398afe commit b1db07e
Show file tree
Hide file tree
Showing 3 changed files with 108 additions and 9 deletions.
14 changes: 14 additions & 0 deletions docs/modules/root.rst
Original file line number Diff line number Diff line change
Expand Up @@ -43,8 +43,22 @@ VAEs
:undoc-members:


ABC
--------------
.. automodule:: tsgm.optimization.abc
:members:
:undoc-members:


Visualization
--------------
.. automodule:: tsgm.utils.visualization
:members:
:undoc-members:


Monitors
--------------
.. automodule:: tsgm.models.monitors
:members:
:undoc-members:
64 changes: 64 additions & 0 deletions tsgm/models/monitors.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,28 @@


class GANMonitor(keras.callbacks.Callback):
"""
GANMonitor is a Keras callback for monitoring and visualizing generated samples during training.
:param num_samples: The number of samples to generate and visualize.
:type num_samples: int
:param latent_dim: The dimensionality of the latent space.
:type latent_dim: int
:param labels: Labels used to condition the generated samples.
:type labels: tsgm.types.Tensor
:param save: Whether to save the generated samples. Defaults to True.
:type save: bool
:param save_path: The path to save the generated samples. Defaults to None.
:type save_path: str
:param mode: The monitoring mode, e.g. "clf", "reg", or "temporal". Defaults to "clf".
:type mode: str
:raises ValueError: If the mode is not one of ['clf', 'reg']
:note: If `save` is True and `save_path` is not specified, the default save path is "/tmp/".
:warning: If `save_path` is specified but `save` is False, a warning is issued.
"""
def __init__(self, num_samples: int, latent_dim: int, labels: tsgm.types.Tensor,
save: bool = True, save_path: typing.Optional[str] = None, mode: str = "clf") -> None:
self._num_samples = num_samples
Expand All @@ -38,6 +60,15 @@ def __init__(self, num_samples: int, latent_dim: int, labels: tsgm.types.Tensor,
os.makedirs(self._save_path, exist_ok=True)

def on_epoch_end(self, epoch, logs=None) -> None:
"""
Callback function called at the end of each training epoch.
:param epoch: Current epoch number.
:type epoch: int
:param logs: Dictionary containing the training loss values.
:type logs: dict
"""
if self._mode in ["clf", "reg"]:
random_latent_vectors = tf.random.normal(shape=(self._num_samples, self._latent_dim))
elif self._mode == "temporal":
Expand All @@ -64,6 +95,30 @@ def on_epoch_end(self, epoch, logs=None) -> None:


class VAEMonitor(keras.callbacks.Callback):
"""
VAEMonitor is a Keras callback for monitoring and visualizing generated samples from a Variational Autoencoder (VAE) during training.
:param num_samples: The number of samples to generate and visualize. Defaults to 6.
:type num_samples: int
:param latent_dim: The dimensionality of the latent space. Defaults to 128.
:type latent_dim: int
:param output_dim: The dimensionality of the output space. Defaults to 2.
:type output_dim: int
:param save: Whether to save the generated samples. Defaults to True.
:type save: bool
:param save_path: The path to save the generated samples. Defaults to None.
:type save_path: str
:raises ValueError: If `output_dim` is less than or equal to 0.
:note: If `save` is True and `save_path` is not specified, the default save path is "/tmp/".
:warning: If `save_path` is specified but `save` is False, a warning is issued.
"""
def __init__(self, num_samples: int = 6, latent_dim: int = 128, output_dim: int = 2,
save: bool = True, save_path: typing.Optional[str] = None) -> None:
self._num_samples = num_samples
Expand All @@ -82,6 +137,15 @@ def __init__(self, num_samples: int = 6, latent_dim: int = 128, output_dim: int
os.makedirs(self._save_path, exist_ok=True)

def on_epoch_end(self, epoch, logs=None) -> None:
"""
Callback function called at the end of each training epoch.
:param epoch: The current epoch number.
:type epoch: int
:param logs: Dictionary containing the training loss values.
:type logs: dict
"""
labels = []
for i in range(self._output_dim):
if not len(labels):
Expand Down
39 changes: 30 additions & 9 deletions tsgm/optimization/abc.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import itertools
import typing
import typing as T
import tqdm

import numpy as np
Expand All @@ -24,8 +24,8 @@ class RejectionSampler(ABCAlgorithm):
Rejection sampling algorithm for approximate Bayesian computation.
"""
def __init__(self, simulator: tsgm.simulator.ModelBasedSimulator, data: tsgm.dataset.Dataset,
statistics: list, epsilon: float, discrepancy: typing.Callable, priors: dict = None,
**kwargs):
statistics: T.List, epsilon: float, discrepancy: T.Callable, priors: T.Dict = None,
**kwargs) -> None:
"""
:param simulator: A model based simulator
:type simulator: class `tsgm.simulator.ModelBasedSimulator`
Expand Down Expand Up @@ -53,19 +53,17 @@ def _calc_statistics(self, data: tsgm.dataset.Dataset) -> tsgm.types.Tensor:
# TODO measure both X & y
return np.array(list(itertools.chain.from_iterable(s(data.X) for s in self._statistics)))

def sample_parameters(self, n_samples: int) -> list:
def sample_parameters(self, n_samples: int) -> T.List:
"""
Samples parameters from the rejection sampler.
:param n_samples: Number of samples
:type n_samples: int
:return: A list of samples. Each sample is represented as a dict.
:rtype: T.List[T.Dict]
"""
cur_sim = self._simulator.clone()

samples: typing.List[typing.Dict] = []
samples: T.List[T.Dict] = []
for i in tqdm.tqdm(range(n_samples)):
err, params = None, None
while err is None or err > self._epsilon:
Expand All @@ -80,7 +78,30 @@ def sample_parameters(self, n_samples: int) -> list:
return samples


def prior_samples(priors, params):
def prior_samples(priors: T.Dict, params: T.List) -> T.List:
"""
Generate prior samples for the specified parameters.
:param priors: A dictionary containing probability distributions for each parameter.
Keys are parameter names, and values are instances of probability distribution classes.
If a parameter is not present in the dictionary, a default prior distribution is used.
:type priors: T.Dict
:param params: A list of parameter names for which prior samples are to be generated.
:type params: T.List
:returns: A dictionary where keys are parameter names and values are samples drawn from their respective prior distributions.
:rtype: T.Dict
Example:
.. code-block:: python
priors = {'mean': NormalDistribution(0, 1), 'std_dev': UniformDistribution(0, 2)}
params = ['mean', 'std_dev']
samples = prior_samples(priors, params)
"""
samples = {}
for var in params:
distr = priors.get(var, DEFAULT_PRIOR)
Expand Down

0 comments on commit b1db07e

Please sign in to comment.