From eb0d1baffa3a7a484f1c0a74b91cbc0ede672e0e Mon Sep 17 00:00:00 2001 From: dannys4 Date: Sun, 23 Jul 2023 15:28:43 -0600 Subject: [PATCH 1/5] Adding more doxygen docs --- MParT/HermiteFunction.h | 45 +++++++++++-------- MParT/LinearizedBasis.h | 35 +++++++++------ MParT/MonotoneComponent.h | 4 +- MParT/OrthogonalPolynomial.h | 11 +++-- MParT/ParameterizedFunctionBase.h | 4 +- MParT/TrainMap.h | 15 +++++-- MParT/TrainMapAdaptive.h | 25 ++++++++++- docs/source/api/index.rst | 3 +- docs/source/api/maptraining.rst | 11 +++++ docs/source/api/maptraining/trainmap.rst | 9 ++++ .../api/maptraining/trainmapadaptive.rst | 9 ++++ docs/source/api/quadrature.rst | 4 +- 12 files changed, 126 insertions(+), 49 deletions(-) create mode 100644 docs/source/api/maptraining.rst create mode 100644 docs/source/api/maptraining/trainmap.rst create mode 100644 docs/source/api/maptraining/trainmapadaptive.rst diff --git a/MParT/HermiteFunction.h b/MParT/HermiteFunction.h index a4169a10..f1b51138 100644 --- a/MParT/HermiteFunction.h +++ b/MParT/HermiteFunction.h @@ -8,20 +8,27 @@ namespace mpart{ -class HermiteFunction +/** + * @brief Class representing a Hermite function for multivariate expansions + * @details The \f$k\f$th Hermite function can be represented as +\f[ \psi_k(x) = \mathrm{He}_k(x)\exp(-x^2/4) \f] + * where \f$\mathrm{He}_k(x)\f$ is the \f$k\f$th probabilist Hermite polynomial. + * + */ +class HermiteFunction { public: KOKKOS_INLINE_FUNCTION void EvaluateAll(double* output, unsigned int maxOrder, double x) const - { - + { + output[0] = 1.0; if(maxOrder>0){ output[1] = x; - + if(maxOrder>1){ // Evaluate all of the physicist hermite polynomials output[2] = std::pow(M_PI, -0.25) * std::exp(-0.5*x*x); @@ -39,7 +46,7 @@ class HermiteFunction double* derivs, unsigned int maxOrder, double x) const - { + { vals[0] = 1.0; derivs[0] = 0.0; @@ -51,7 +58,7 @@ class HermiteFunction // Evaluate all of the physicist hermite polynomials polyBase.EvaluateDerivatives(&vals[2], &derivs[2], maxOrder-2, x); - // Add the scaling + // Add the scaling const double baseScaling = std::pow(M_PI, -0.25) * std::exp(-0.5*x*x); double scale; double currFactorial = 1; @@ -78,18 +85,18 @@ class HermiteFunction double* derivs2, unsigned int maxOrder, double x) const - { + { // Evaluate all of the physicist hermite polynomials EvaluateDerivatives(vals, derivs, maxOrder, x); derivs2[0] = 0.0; - + if(maxOrder>0){ derivs2[1] = 0.0; if(maxOrder>1){ - - // Add the scaling + + // Add the scaling for(unsigned int i=0; i<=maxOrder-2; ++i) derivs2[i+2] = -(2.0*i + 1.0 - x*x)*vals[i+2]; } @@ -97,7 +104,7 @@ class HermiteFunction } - KOKKOS_INLINE_FUNCTION double Evaluate(unsigned int const order, + KOKKOS_INLINE_FUNCTION double Evaluate(unsigned int const order, double const x) const { if(order==0){ @@ -110,9 +117,9 @@ class HermiteFunction } } - KOKKOS_INLINE_FUNCTION double Derivative(unsigned int const order, - double const x) const - { + KOKKOS_INLINE_FUNCTION double Derivative(unsigned int const order, + double const x) const + { if(order==0){ return 0.0; }else if(order==1){ @@ -124,11 +131,11 @@ class HermiteFunction } } - KOKKOS_INLINE_FUNCTION double SecondDerivative(unsigned int const order, + KOKKOS_INLINE_FUNCTION double SecondDerivative(unsigned int const order, double const x) const - { + { if(order<2){ - return 0; + return 0; }else{ return -(2.0*order+1.0-x*x)*Evaluate(order-2, x); } @@ -136,9 +143,9 @@ class HermiteFunction private: PhysicistHermite polyBase; - + }; // class HermiteFunction } -#endif \ No newline at end of file 
+#endif \ No newline at end of file diff --git a/MParT/LinearizedBasis.h b/MParT/LinearizedBasis.h index b7786589..92c75e09 100644 --- a/MParT/LinearizedBasis.h +++ b/MParT/LinearizedBasis.h @@ -6,6 +6,13 @@ namespace mpart{ +/** + * @brief Basis that is linear outside a given upper and lower bound. + * @details Implemented to be piecewise-defined mapping \f$F_{ab}[\psi]\f$, where \f$\psi\f$ is a real-valued univariate function. We define + * \f[ F_{ab}[\psi](x) = \begin{cases}\psi(x) & x\in[a,b]\\\psi(a)+(x-a)\psi^\prime(a) & xb \f] + * + * @tparam OtherBasis type of basis inside bounds (e.g. OrthogonalPolynomial) + */ template class LinearizedBasis { @@ -30,8 +37,8 @@ class LinearizedBasis KOKKOS_INLINE_FUNCTION void EvaluateAll(double* output, unsigned int maxOrder, double x) const - { - + { + if(xub_){ EvaluateDerivatives(vals, derivs, maxOrder, x); for(unsigned int i=0; i<=maxOrder; ++i) @@ -102,7 +109,7 @@ class LinearizedBasis } - KOKKOS_INLINE_FUNCTION double Evaluate(unsigned int const order, + KOKKOS_INLINE_FUNCTION double Evaluate(unsigned int const order, double const x) const { if(xub_){ @@ -126,9 +133,9 @@ class LinearizedBasis } } - KOKKOS_INLINE_FUNCTION double SecondDerivative(unsigned int const order, + KOKKOS_INLINE_FUNCTION double SecondDerivative(unsigned int const order, double const x) const - { + { if(xub_){ @@ -148,4 +155,4 @@ class LinearizedBasis } -#endif \ No newline at end of file +#endif \ No newline at end of file diff --git a/MParT/MonotoneComponent.h b/MParT/MonotoneComponent.h index 251deb5c..bab9995b 100644 --- a/MParT/MonotoneComponent.h +++ b/MParT/MonotoneComponent.h @@ -28,9 +28,9 @@ namespace mpart{ The function \f$T\f$ is based on another (generally non-monotone) function \f$f : R^N\rightarrow R\f$ and a strictly positve function \f$g : R\rightarrow R_{>0}\f$. Together, these functions define the monotone component $T$ through -$$ +\f[ T(x_1, x_2, ..., x_D) = f(x_1,x_2,..., x_{D-1}, 0) + \int_0^{x_D} g\left( \partial_D f(x_1,x_2,..., x_{D-1}, t) \right) dt -$$ +\f] @tparam ExpansionType A class defining the function \f$f\f$. It must satisfy the cached parameterization concept. @tparam PosFuncType A class defining the function \f$g\f$. This class must have `Evaluate` and `Derivative` functions accepting a double and returning a double. The MParT::SoftPlus and MParT::Exp classes in PositiveBijectors.h are examples of classes defining this interface. diff --git a/MParT/OrthogonalPolynomial.h b/MParT/OrthogonalPolynomial.h index 4919a8b5..c3706d1a 100644 --- a/MParT/OrthogonalPolynomial.h +++ b/MParT/OrthogonalPolynomial.h @@ -8,9 +8,12 @@ namespace mpart{ -/* -p_{k}(x) = (a_k x + b_k) p_{k-1}(x) - c_k p_{k-2}(x) -*/ +/** + * @brief Generic class to represent orthogonal polynomials. + * @details An orthogonal polynomial has form + \f[ p_{k}(x) = (a_k x + b_k) p_{k-1}(x) - c_k p_{k-2}(x) \f] + * where \f(a_k,b_k,c_k\f) are all given explicitly (three-term recurrence). 
+ */ template class OrthogonalPolynomial : public Mixer { @@ -33,7 +36,7 @@ class OrthogonalPolynomial : public Mixer if(normalize_){ for(unsigned int order=0; order<=maxOrder; ++order){ - output[order] /= this->Normalization(order); + output[order] /= this->Normalization(order); } } } diff --git a/MParT/ParameterizedFunctionBase.h b/MParT/ParameterizedFunctionBase.h index 85af08e7..b92cde8c 100644 --- a/MParT/ParameterizedFunctionBase.h +++ b/MParT/ParameterizedFunctionBase.h @@ -41,7 +41,7 @@ namespace mpart { virtual Kokkos::View& Coeffs(){return this->savedCoeffs;}; /** @brief Set the internally stored view of coefficients. - @detail Performs a deep copy of the input coefficients to the internally stored coefficients. + @details Performs a deep copy of the input coefficients to the internally stored coefficients. @param coeffs A view containing the coefficients to copy. */ virtual void SetCoeffs(Kokkos::View coeffs); @@ -51,7 +51,7 @@ namespace mpart { #endif /** @brief Wrap the internal coefficient view around another view. - @detail Performs a shallow copy of the input coefficients to the internally stored coefficients. + @details Performs a shallow copy of the input coefficients to the internally stored coefficients. If values in the view passed to this function are changed, the values will also change in the internally stored view. @param coeffs A view containing the coefficients we want to wrap. diff --git a/MParT/TrainMap.h b/MParT/TrainMap.h index 3adccd16..8eafe9cf 100644 --- a/MParT/TrainMap.h +++ b/MParT/TrainMap.h @@ -10,20 +10,30 @@ namespace mpart { /** * @brief TrainOptions adds options for training your map, - * with fields largely based on nlopt settings. verbose is an integer - * where 0=nothing, 1=some diagnostics, 2=debugging + * with fields largely based on NLopt settings. For documentation + * of such fields, see NLOpt. * */ struct TrainOptions { + /** NLOpt: Optimization Algorithm to use */ std::string opt_alg = "LD_SLSQP"; + /** NLOpt: Lower bound on optimizer */ double opt_stopval = -std::numeric_limits::infinity(); + /** NLOpt: Relative tolerance on function value change */ double opt_ftol_rel = 1e-3; + /** NLOpt: Absolute tolerance of function value change */ double opt_ftol_abs = 1e-3; + /** NLOpt: Relative tolerance of minimizer value change */ double opt_xtol_rel = 1e-4; + /** NLOpt: Absolute tolerance of minimizer value change */ double opt_xtol_abs = 1e-4; + /** NLOpt: Maximum number of evaluations of function to optimize */ int opt_maxeval = 1000; + /** NLOpt: Maximum amount of time to spend optimizing */ double opt_maxtime = std::numeric_limits::infinity(); + /** Verbosity of map training (1: verbose, 2: debug) */ int verbose = 0; + /** * @brief Create a string representation of these training options (helpful for bindings) * @@ -47,7 +57,6 @@ struct TrainOptions { /** * @brief Function to train a map inplace given an objective and optimization options * - * @tparam ObjectiveType * @param map Map to optimize (inplace) * @param objective MapObjective to optimize over * @param options Options for optimizing the map diff --git a/MParT/TrainMapAdaptive.h b/MParT/TrainMapAdaptive.h index 89d18902..9a21ae9b 100644 --- a/MParT/TrainMapAdaptive.h +++ b/MParT/TrainMapAdaptive.h @@ -12,11 +12,23 @@ namespace mpart { -// Options specifically for ATM algorithm, with map eval opts -> training opts-> ATM specific opts +/** + * @brief Both map and training options combined with special ATM options. 
+ *
+ */
 struct ATMOptions: public MapOptions, public TrainOptions {
+    /** Maximum number of iterations that do not improve the training error */
     unsigned int maxPatience = 10;
-    unsigned int maxSize = 10;
+    /** Maximum number of coefficients in the final expansion (including ALL dimensions of the map) */
+    unsigned int maxSize = std::numeric_limits::infinity();
+    /** Multiindex representing the maximum degree in each input dimension */
     MultiIndex maxDegrees;
+
+    /**
+     * @brief Create a string representation of these options.
+     *
+     * @return std::string
+     */
     std::string String() override {
         std::string md_str = maxDegrees.String();
         std::stringstream ss;
@@ -24,10 +36,19 @@ struct ATMOptions: public MapOptions, public TrainOptions {
         ss << "maxPatience = " << maxPatience << "\n";
         ss << "maxSize = " << maxSize << "\n";
         ss << "maxDegrees = " << maxDegrees.String();
+        return ss.str();
     }
 };
 
+/**
+ * @brief Adaptively discover new terms to add to the map's coefficient basis, using the ATM algorithm of Baptista et al. (2022).
+ *
+ * @tparam MemorySpace Device or host space to work in
+ * @param mset0 Vector storing an initial (minimal) guess of the multiindex sets, one per output dimension; modified in place.
+ * @param objective MapObjective that the map should be adapted to fit
+ * @return std::shared_ptr> New map trained according to the given objective and options.
+ */
 template
 std::shared_ptr> TrainMapAdaptive(std::vector &mset0,
     std::shared_ptr> objective,
diff --git a/docs/source/api/index.rst b/docs/source/api/index.rst
index 60e40e0d..e3884e99 100644
--- a/docs/source/api/index.rst
+++ b/docs/source/api/index.rst
@@ -19,4 +19,5 @@ API Reference
     multivariateexpansionworker
     templateconcepts
     utilities/initialization
-    utilities/serialization
\ No newline at end of file
+    utilities/serialization
+    maptraining
diff --git a/docs/source/api/maptraining.rst b/docs/source/api/maptraining.rst
new file mode 100644
index 00000000..79bf581a
--- /dev/null
+++ b/docs/source/api/maptraining.rst
@@ -0,0 +1,11 @@
+===================
+Map Training
+===================
+
+Functions and Classes
+---------------------
+
+.. toctree::
+
+    maptraining/trainmap
+    maptraining/trainmapadaptive
diff --git a/docs/source/api/maptraining/trainmap.rst b/docs/source/api/maptraining/trainmap.rst
new file mode 100644
index 00000000..79742156
--- /dev/null
+++ b/docs/source/api/maptraining/trainmap.rst
@@ -0,0 +1,9 @@
+==============================
+Train Map
+==============================
+
+.. doxygenfunction:: mpart::TrainMap
+
+.. doxygenstruct:: mpart::TrainOptions
+    :members:
+    :undoc-members:
diff --git a/docs/source/api/maptraining/trainmapadaptive.rst b/docs/source/api/maptraining/trainmapadaptive.rst
new file mode 100644
index 00000000..3e718529
--- /dev/null
+++ b/docs/source/api/maptraining/trainmapadaptive.rst
@@ -0,0 +1,9 @@
+==============================
+Adaptive Map Training
+==============================
+
+.. doxygenfunction:: mpart::TrainMapAdaptive
+
+.. doxygenstruct:: mpart::ATMOptions
+    :members:
+    :undoc-members:
diff --git a/docs/source/api/quadrature.rst b/docs/source/api/quadrature.rst
index 1c692b9b..05a5f576 100644
--- a/docs/source/api/quadrature.rst
+++ b/docs/source/api/quadrature.rst
@@ -8,5 +8,5 @@ C++ Objects
 .. toctree::
 
     quadrature/clenshawcurtis
-    quadrature/adaptivesimpson
-    quadrature/adaptiveclenshawcurtis
+    quadrature/adaptivesimpson
+    quadrature/recursivequadrature

From d2dc060187a120126a3e8ba8070dfbe4c94dd660 Mon Sep 17 00:00:00 2001
From: dannys4
Date: Wed, 26 Jul 2023 12:22:39 -0400
Subject: [PATCH 2/5] Add NLOpt link

---
 MParT/TrainMap.h | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/MParT/TrainMap.h b/MParT/TrainMap.h
index 8eafe9cf..3e97f0a7 100644
--- a/MParT/TrainMap.h
+++ b/MParT/TrainMap.h
@@ -11,7 +11,7 @@ namespace mpart {
 /**
  * @brief TrainOptions adds options for training your map,
  * with fields largely based on NLopt settings. For documentation
- * of such fields, see NLOpt.
+ * of such fields, see NLOpt.
  *
  */
 struct TrainOptions {

From f0b553d10e2b78584f20e8ef3256f2c3ad04b13d Mon Sep 17 00:00:00 2001
From: dannys4
Date: Wed, 26 Jul 2023 14:09:40 -0400
Subject: [PATCH 3/5] Fix docker

---
 .docker/Dockerfile                      | 2 +-
 .github/workflows/build-push-docker.yml | 1 +
 2 files changed, 2 insertions(+), 1 deletion(-)

diff --git a/.docker/Dockerfile b/.docker/Dockerfile
index b82d00d1..5871de3c 100644
--- a/.docker/Dockerfile
+++ b/.docker/Dockerfile
@@ -7,7 +7,7 @@ RUN --mount=type=cache,target=/opt/conda/pkgs conda env create -f MParT_/.docker
 
 SHELL ["conda", "run", "-n", "mpart", "/bin/bash", "-c"]
 
-RUN git clone --depth=1 --branch 3.7.00 https://github.com/kokkos/kokkos.git && \
+RUN git clone --depth=1 --branch tags/3.7.01 https://github.com/kokkos/kokkos.git && \
     mkdir kokkos/build && \
     cd kokkos/build && \
     cmake \
diff --git a/.github/workflows/build-push-docker.yml b/.github/workflows/build-push-docker.yml
index 2c784397..49aee263 100644
--- a/.github/workflows/build-push-docker.yml
+++ b/.github/workflows/build-push-docker.yml
@@ -4,6 +4,7 @@ on:
   push:
     branches:
       - main
+  pull_request: {}
 
 jobs:
   docker:

From 07abb817a8d86dae97156579b376be9e21b94257 Mon Sep 17 00:00:00 2001
From: dannys4
Date: Wed, 26 Jul 2023 14:31:46 -0400
Subject: [PATCH 4/5] Remove docker push on PR

---
 .docker/Dockerfile | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.docker/Dockerfile b/.docker/Dockerfile
index 5871de3c..08be8db4 100644
--- a/.docker/Dockerfile
+++ b/.docker/Dockerfile
@@ -7,7 +7,7 @@ RUN --mount=type=cache,target=/opt/conda/pkgs conda env create -f MParT_/.docker
 
 SHELL ["conda", "run", "-n", "mpart", "/bin/bash", "-c"]
 
-RUN git clone --depth=1 --branch tags/3.7.01 https://github.com/kokkos/kokkos.git && \
+RUN git clone --depth=1 --branch 3.7.02 https://github.com/kokkos/kokkos.git && \
     mkdir kokkos/build && \
     cd kokkos/build && \
     cmake \

From 738242a0d08a3a5bb51cd1972f92573449fb7083 Mon Sep 17 00:00:00 2001
From: dannys4
Date: Wed, 26 Jul 2023 14:41:49 -0400
Subject: [PATCH 5/5] Remove PR docker push

---
 .github/workflows/build-push-docker.yml | 1 -
 1 file changed, 1 deletion(-)

diff --git a/.github/workflows/build-push-docker.yml b/.github/workflows/build-push-docker.yml
index 49aee263..2c784397 100644
--- a/.github/workflows/build-push-docker.yml
+++ b/.github/workflows/build-push-docker.yml
@@ -4,7 +4,6 @@ on:
   push:
     branches:
       - main
-  pull_request: {}
 
 jobs:
   docker:
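
As an illustration of the expansions documented in patch 1 above: OrthogonalPolynomial.h defines the three-term recurrence p_k(x) = (a_k x + b_k) p_{k-1}(x) - c_k p_{k-2}(x), and HermiteFunction.h defines the Hermite functions psi_k(x) = He_k(x) exp(-x^2/4). The sketch below is a minimal standalone version of those two formulas, assuming the standard probabilists' coefficients a_k = 1, b_k = 0, c_k = k - 1; it is not the MParT implementation, which (per the diff) starts its basis with the constant and linear terms and works with the physicists' normalization internally.

#include <cmath>
#include <vector>

// Evaluate psi_0..psi_maxOrder at x, where psi_k(x) = He_k(x) * exp(-x^2/4) and the
// probabilists' Hermite polynomials follow the three-term recurrence
//   He_k(x) = x * He_{k-1}(x) - (k-1) * He_{k-2}(x),  He_0 = 1,  He_1 = x.
std::vector<double> HermiteFunctionSketch(unsigned int maxOrder, double x)
{
    std::vector<double> he(maxOrder + 1, 1.0);
    if (maxOrder > 0) he[1] = x;
    for (unsigned int k = 2; k <= maxOrder; ++k)
        he[k] = x * he[k - 1] - (k - 1.0) * he[k - 2];

    std::vector<double> psi(maxOrder + 1);
    const double weight = std::exp(-0.25 * x * x);
    for (unsigned int k = 0; k <= maxOrder; ++k)
        psi[k] = he[k] * weight;
    return psi;
}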
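
The linearized basis documented in LinearizedBasis.h evaluates the wrapped basis function psi on [a, b] and extends it linearly outside that interval with a first-order Taylor expansion about the nearest bound; the upper-bound branch below is written by symmetry with the lower-bound branch given in the header comment. A small sketch, not the templated MParT class (which wraps an OtherBasis type rather than std::function):

#include <functional>

// F_ab[psi](x) = psi(x)                     for x in [a, b]
//              = psi(a) + (x - a) psi'(a)   for x < a
//              = psi(b) + (x - b) psi'(b)   for x > b   (upper branch assumed by symmetry)
double LinearizedEvaluate(const std::function<double(double)>& psi,
                          const std::function<double(double)>& dpsi,
                          double a, double b, double x)
{
    if (x < a) return psi(a) + (x - a) * dpsi(a);
    if (x > b) return psi(b) + (x - b) * dpsi(b);
    return psi(x);
}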
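
The MonotoneComponent.h comment defines T(x_1, ..., x_D) = f(x_1, ..., x_{D-1}, 0) + \int_0^{x_D} g(\partial_D f(x_1, ..., x_{D-1}, t)) dt. The sketch below evaluates that formula with a fixed trapezoidal rule, purely to make the definition concrete; MParT itself uses the adaptive quadrature rules listed in the quadrature documentation and a cached parameterization of f.

#include <functional>
#include <vector>

// Illustrative evaluation of T(x) = f(x_{1:D-1}, 0) + int_0^{x_D} g( d_D f(x_{1:D-1}, t) ) dt.
// `f` and `dfdxd` take the point with its last entry overridden by the integration variable;
// `g` is a strictly positive function such as SoftPlus or Exp.
double MonotoneComponentSketch(const std::function<double(const std::vector<double>&)>& f,
                               const std::function<double(const std::vector<double>&)>& dfdxd,
                               const std::function<double(double)>& g,
                               std::vector<double> x,
                               int numQuad = 64)
{
    const double xd = x.back();
    x.back() = 0.0;
    double result = f(x);

    // Trapezoidal rule on [0, x_D]; the signed step h also handles negative x_D correctly.
    const double h = xd / numQuad;
    for (int i = 0; i <= numQuad; ++i) {
        x.back() = i * h;
        const double w = (i == 0 || i == numQuad) ? 0.5 : 1.0;
        result += w * h * g(dfdxd(x));
    }
    return result;
}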
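
The TrainOptions fields documented in TrainMap.h map directly onto NLopt stopping criteria, and TrainMap(map, objective, options) then optimizes the map coefficients in place. A hedged usage sketch; the include path, the "LD_LBFGS" algorithm name, and the map and objective built elsewhere are assumptions rather than part of this patch:

#include <iostream>
#include "MParT/TrainMap.h"  // assumed install/include layout

int main()
{
    mpart::TrainOptions options;
    options.opt_alg      = "LD_LBFGS"; // any NLopt algorithm name; the default above is "LD_SLSQP"
    options.opt_ftol_rel = 1e-5;       // tighter relative tolerance on the objective value
    options.opt_maxeval  = 500;        // cap the number of objective evaluations
    options.verbose      = 1;          // print some training diagnostics

    // String() gives a printable summary of the settings (handy for logs and bindings).
    std::cout << options.String() << std::endl;

    // With a map and a MapObjective constructed elsewhere, the options would then be passed as
    //   mpart::TrainMap(map, objective, options);
    // which trains the map in place, per the documentation added in this patch.
    return 0;
}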