Commit 769c59c

Add empty ca_conjugate_gradient.

ypodlesov committed May 20, 2024
1 parent e185165 commit 769c59c
Showing 26 changed files with 247 additions and 130 deletions.
6 changes: 4 additions & 2 deletions CMakeLists.txt
@@ -27,7 +27,8 @@ function(add_if_exists name)
     endif()
 endfunction()
 
-
+# set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fsanitize=address")
+# set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fsanitize=leak")
 set(DEPS_PATH ${CMAKE_CURRENT_SOURCE_DIR}/.deps)
 set(BASIC_LA_PATH ${CMAKE_CURRENT_SOURCE_DIR}/basic_la)
 set(METIS_INCLUDE_PATH ${DEPS_PATH}/include)
@@ -37,4 +38,5 @@ add_if_exists(basic_la)
 add_if_exists(qr_decomposition)
 add_if_exists(conjugate_gradient)
 
-add_if_exists(matrix_powers_mv)
+add_if_exists(matrix_powers_mv)
+add_if_exists(ca_conjugate_gradient)
4 changes: 3 additions & 1 deletion basic_la/CMakeLists.txt
@@ -1 +1,3 @@
-add_library(BASIC_LA STATIC matrix.cpp common_matrix.cpp vector.cpp)
+add_library(BASIC_LA STATIC matrix.cpp common_matrix.cpp vector.cpp sparse_matrix.cpp matrix_powers_mv.cpp)
+target_include_directories(BASIC_LA PUBLIC ${METIS_INCLUDE_PATH})
+target_link_libraries(BASIC_LA PUBLIC ${METIS_LIB_PATH})
basic_la/matrix_powers_mv.cpp
@@ -4,9 +4,10 @@
 #include <cassert>
 #include <cstdint>
 #include <cstring>
+#include <iterator>
 #include <thread>
 
-bool ReorderMatrix(SparseMatrix<double>& sp_matrix) {
+bool ReorderMatrix(const SparseMatrix<double>& sp_matrix, SparseMatrix<double>& res_matrix) {
     SparseMatrix<double> a_no_diag;
     sp_matrix.RemoveDiag(a_no_diag);
 
@@ -30,31 +31,35 @@ bool ReorderMatrix(SparseMatrix<double>& sp_matrix) {
         return false;
     }
     for (int64_t i = 0; i < sp_matrix.row_cnt_; ++i) {
-        int64_t cur_row_cnt = sp_matrix.i_a_[i + 1] - sp_matrix.i_a_[i];
-        std::memcpy(&sp_matrix.data_[sp_matrix.i_a_[i]], &sp_matrix.data_[sp_matrix.i_a_[perm[i]]], cur_row_cnt * sizeof(double));
-        std::memcpy(&sp_matrix.j_a_[sp_matrix.i_a_[i]], &sp_matrix.j_a_[sp_matrix.i_a_[perm[i]]], cur_row_cnt * sizeof(int64_t));
-        sp_matrix.i_a_[i] = sp_matrix.i_a_[perm[i]];
+        int64_t cur_row_cnt = sp_matrix.i_a_[perm[i] + 1] - sp_matrix.i_a_[perm[i]];
+        std::memcpy(&res_matrix.data_[sp_matrix.i_a_[i]], &sp_matrix.data_[sp_matrix.i_a_[perm[i]]], cur_row_cnt * sizeof(double));
+        std::memcpy(&res_matrix.j_a_[sp_matrix.i_a_[i]], &sp_matrix.j_a_[sp_matrix.i_a_[perm[i]]], cur_row_cnt * sizeof(int64_t));
+        res_matrix.i_a_[i] = sp_matrix.i_a_[perm[i]];
     }
     return true;
 }
 
-bool MatrixPowersMV(SparseMatrix<double>& sp_matrix, const Vector<double>& x, std::vector<Vector<double>>& res) {
-    // if (!ReorderMatrix(sp_matrix)) {
+bool MatrixPowersMV(const SparseMatrix<double>& sp_matrix, const Vector<double>& x, std::vector<Vector<double>>& res) {
+    // SparseMatrix<double> reordered;
+    // if (!ReorderMatrix(sp_matrix, reordered)) {
     // return false;
     // }
-    Vector<double> prev_x = x;
-    for (auto cur_x : res) {
+    res.front() = x;
+    for (auto cur_x_iter = std::next(res.begin()); cur_x_iter != res.end(); ++cur_x_iter) {
+        auto& prev_x = *std::prev(cur_x_iter);
+        auto& cur_x = *cur_x_iter;
         assert(prev_x.mem_size_ == x.mem_size_);
         assert(cur_x.mem_size_ == x.mem_size_);
         NHelpers::Nullify(cur_x.data_, cur_x.mem_size_);
 
-        int64_t cores_num = std::thread::hardware_concurrency();
+        int64_t cores_num = std::min<int64_t>(x.mem_size_, std::thread::hardware_concurrency());
         assert(sp_matrix.row_cnt_ % cores_num == 0);
         const int64_t row_step = sp_matrix.row_cnt_ / cores_num;
 
         {
             std::vector<std::pair<SparseMatrix<double>, Vector<double>>> matrix_blocks(cores_num);
             std::vector<std::thread> threads;
             threads.reserve(cores_num);
             {
                 int64_t cur_row_start = 0;
                 for (auto& [matrix_block, vector_block] : matrix_blocks) {
@@ -82,21 +87,20 @@ bool MatrixPowersMV(SparseMatrix<double>& sp_matrix, const Vector<double>& x, std::vector<Vector<double>>& res) {
                         }
                     }, std::ref(matrix_block)
                     , std::ref(vector_block)
-                    , cur_x
-                    , prev_x
+                    , std::ref(cur_x)
+                    , std::ref(prev_x)
                     , local_row_start);
                     cur_row_start += row_step;
                 }
                 for (auto&& thread : threads) {
                     thread.join();
                 }
-                Vector<double> cmp_res(x.mem_size_);
-                sp_matrix.VecMult(prev_x, cmp_res);
-                cmp_res.PlusAX(cur_x, -1);
-                std::cout << cmp_res.Norm2() << std::endl;
+                // Vector<double> cmp_res(x.mem_size_);
+                // sp_matrix.VecMult(prev_x, cmp_res);
+                // cmp_res.PlusAX(cur_x, -1);
+                // std::cout << cmp_res.Norm2() << std::endl;
             }
         }
-        prev_x = cur_x;
     }
     return true;
 }
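
Note: the loop above fills res with the monomial Krylov sequence res[0] = x, res[k] = A * res[k-1], the "matrix powers" kernel that communication-avoiding Krylov methods compute in one batched pass. For orientation, a minimal single-threaded sketch of the same recurrence (hypothetical helper name, not part of the commit; assumes VecMult computes y = A * x, as the commented-out check above uses it):

bool MatrixPowersMVReference(SparseMatrix<double>& a, const Vector<double>& x, std::vector<Vector<double>>& res) {
    if (res.empty()) {
        return false;
    }
    res.front() = x;
    for (size_t k = 1; k < res.size(); ++k) {
        a.VecMult(res[k - 1], res[k]);  // res[k] = A * res[k-1]
    }
    return true;
}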
8 changes: 8 additions & 0 deletions basic_la/matrix_powers_mv.h
@@ -0,0 +1,8 @@
#pragma once

#include <metis.h>
#include "sparse_matrix.h"
#include "vector.h"

bool ReorderMatrix(const SparseMatrix<double>& sp_matrix, SparseMatrix<double>& res_matrix);
bool MatrixPowersMV(const SparseMatrix<double>& sp_matrix, const Vector<double>& x, std::vector<Vector<double>>& res);
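
Note: the header pairs a METIS-backed reordering (hence the metis.h include) with the basis expansion. A hedged call sketch against these two declarations (the helper name and basis size s are illustrative; vectors are pre-sized because MatrixPowersMV asserts on mem_size_):

bool BuildBasis(const SparseMatrix<double>& a, const Vector<double>& x, size_t s, std::vector<Vector<double>>& basis) {
    SparseMatrix<double> reordered;
    if (!ReorderMatrix(a, reordered)) {  // METIS-based row reordering
        return false;
    }
    basis.resize(s);
    for (auto& v : basis) {
        v = Vector<double>(x.mem_size_);  // MatrixPowersMV expects pre-sized vectors
    }
    return MatrixPowersMV(reordered, x, basis);
}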
4 changes: 2 additions & 2 deletions basic_la/sparse_matrix.h
@@ -17,7 +17,7 @@ struct SparseMatrix: public CommonMatrix<SparseMatrix<T, Hold>, T, Hold> {
 
     SparseMatrix() = default;
 
-    SparseMatrix(int64_t row_cnt, int64_t col_cnt, int64_t non_zero)
+    SparseMatrix(int64_t row_cnt, const int64_t col_cnt, const int64_t non_zero)
         : Base(row_cnt, col_cnt, std::min(non_zero, row_cnt * col_cnt))
         , j_a_{new int64_t[std::min(non_zero, row_cnt * col_cnt)]}
         , i_a_{new int64_t[row_cnt + 1]}
Expand Down Expand Up @@ -49,7 +49,7 @@ struct SparseMatrix: public CommonMatrix<SparseMatrix<T, Hold>, T, Hold> {
std::memcpy(data_, &other.data_[other.i_a_[row_start]], mem_size_ * sizeof(T));
std::memcpy(j_a_, &other.j_a_[other.i_a_[row_start]], mem_size_ * sizeof(int64_t));
for (int64_t i = row_start; i < row_end; ++i) {
i_a_[i] = std::max<int64_t>(other.i_a_[i] - other.i_a_[row_start], 0);
i_a_[i - row_start] = std::max<int64_t>(other.i_a_[i] - other.i_a_[row_start], 0);
}
i_a_[row_end - row_start] = row_cnt_ * col_cnt_;
}
Expand Down
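
Note: the one-line fix above is the standard CSR row-slice rebasing: when copying rows [row_start, row_end) out of a larger matrix, row pointers must be written at local indices (i - row_start) and offset by other.i_a_[row_start]. A small self-contained illustration with plain arrays (hypothetical helper, not part of the commit):

#include <cstdint>
#include <vector>

// local_ptr[r] = global_ptr[row_start + r] - global_ptr[row_start]
std::vector<int64_t> SliceRowPointers(const std::vector<int64_t>& i_a, int64_t row_start, int64_t row_end) {
    std::vector<int64_t> local(row_end - row_start + 1);
    for (int64_t i = row_start; i <= row_end; ++i) {
        local[i - row_start] = i_a[i] - i_a[row_start];  // write at the local index
    }
    return local;
}

// Example: i_a = {0, 2, 5, 7, 9}, rows [1, 3) gives {0, 3, 5}: the slice holds 5 nonzeros.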
7 changes: 7 additions & 0 deletions ca_conjugate_gradient/CMakeLists.txt
@@ -0,0 +1,7 @@
add_catch(test_ca_conjugate_gradient test.cpp ca_conjugate_gradient.cpp)
target_include_directories(test_ca_conjugate_gradient PRIVATE ${BASIC_LA_PATH})
target_link_libraries(test_ca_conjugate_gradient PRIVATE BASIC_LA)

# add_catch(bench_conjugate_gradient run.cpp conjugate_gradient.cpp)
# target_include_directories(bench_conjugate_gradient PRIVATE ${BASIC_LA_PATH})
# target_link_libraries(bench_conjugate_gradient PRIVATE BASIC_LA)
46 changes: 46 additions & 0 deletions ca_conjugate_gradient/ca_conjugate_gradient.cpp
@@ -0,0 +1,46 @@
#include "ca_conjugate_gradient.h"
#include <matrix_powers_mv.h>

bool CAConjugateGradient(const SparseMatrix<double>& a, const Vector<double>& b, Vector<double>& x) {
assert(a.data_ && b.data_ && a.row_cnt_ == b.mem_size_);
if (!x.data_ || x.mem_size_ != a.col_cnt_) {
x = Vector<double>(a.col_cnt_);
}
Vector<double> current_residual(b);
Vector<double> current_p(b);
Vector<double> current_x(a.row_cnt_);
NHelpers::Nullify(current_x.data_, current_x.mem_size_);
int64_t n = b.mem_size_;

std::vector<Vector<double>> subspace(1 << 4);
for (auto& vec : subspace) {
vec = Vector<double>(n);
}

MatrixPowersMV(a, b, subspace);

double current_alpha, current_beta;
for (auto& vector : subspace) {
double ap_cur_p_dot_prod = NHelpers::InnerProd(vector.data_, current_p.data_, n);
if (NHelpers::RoughEq<double, double>(ap_cur_p_dot_prod, 0.0)) {
break;
}

double current_residual_norm = NHelpers::InnerProd(current_residual.data_, current_residual.data_, n);
current_alpha = current_residual_norm / ap_cur_p_dot_prod;

current_x.PlusAX(current_p, current_alpha);

Vector<double> next_residual(n);
next_residual.AXPlusBY(current_residual, 1, vector, -current_alpha);

if (NHelpers::RoughEq<double, double>(current_residual_norm, 0.0)) {
break;
}
current_beta = NHelpers::InnerProd(next_residual.data_, next_residual.data_, n) / current_residual_norm;
current_p.AXPlusBY(next_residual, 1, current_p, current_beta);
current_residual = std::move(next_residual);
}
x = std::move(current_x);
return true;
}
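
Note: the loop is the textbook CG recurrence, alpha_k = (r_k, r_k) / (p_k, A p_k), x_{k+1} = x_k + alpha_k p_k, r_{k+1} = r_k - alpha_k A p_k, beta_k = (r_{k+1}, r_{k+1}) / (r_k, r_k), p_{k+1} = r_{k+1} + beta_k p_k, except that the precomputed subspace vectors A^k b stand in for the products A p_k. That substitution is exact only on the first step (p_0 = r_0 = b), which may be why the residual REQUIRE in the tests below is still commented out. A hedged usage sketch, under the same I/O conventions as the tests:

#include "ca_conjugate_gradient.h"
#include <fstream>
#include <iostream>

int main() {
    std::ifstream in("matrix_examples/sparse_spd/128");
    SparseMatrix<double> a;
    in >> a;
    Vector<double> b(a.row_cnt_);
    NHelpers::GenRandomVector(b, a.row_cnt_, true);
    Vector<double> x(a.row_cnt_);
    if (CAConjugateGradient(a, b, x)) {
        Vector<double> r(a.row_cnt_);
        a.VecMult(x, r);   // r = A x
        r.PlusAX(b, -1);   // r = A x - b
        std::cout << "residual norm: " << r.Norm2() << std::endl;
    }
    return 0;
}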
6 changes: 6 additions & 0 deletions ca_conjugate_gradient/ca_conjugate_gradient.h
@@ -0,0 +1,6 @@
#pragma once
#include <sparse_matrix.h>
#include <matrix.h>
#include <vector.h>

bool CAConjugateGradient(const SparseMatrix<double>& a, const Vector<double>& b, Vector<double>& x);
51 changes: 51 additions & 0 deletions ca_conjugate_gradient/test.cpp
@@ -0,0 +1,51 @@
#include "ca_conjugate_gradient.h"
#include <filesystem>
#include <fstream>
#include <sstream>
#include <iostream>
#include <matrix.h>

#include <catch2/catch_test_macros.hpp>
#include <catch2/generators/catch_generators_all.hpp>
#include <cstdint>

static void Test(const uint32_t n) {

std::stringstream file_name;
file_name << std::filesystem::current_path().string() << "/../" << "matrix_examples/sparse_spd/" << n;
std::ifstream fstream;
fstream.open(file_name.str());
REQUIRE(fstream.is_open());
SparseMatrix<double> a;
fstream >> a;
fstream.close();
REQUIRE(a.data_);
Vector<double> b(n);
NHelpers::GenRandomVector(b, n, true);

Vector<double> x(n);
REQUIRE(CAConjugateGradient(a, b, x));

Vector<double> result(n);
a.VecMult(x, result);
result.PlusAX(b, -1);
[[maybe_unused]] constexpr double eps = 0.001;
std::cout << result.Norm2() << std::endl;
// REQUIRE(NHelpers::RoughEq<double, double>(result.Norm2(), 0.0, eps));
}

TEST_CASE("Size 128") {
Test(128);
}

TEST_CASE("Size 256") {
Test(256);
}

TEST_CASE("Size 512") {
Test(512);
}

TEST_CASE("Size 1024") {
Test(1024);
}
8 changes: 4 additions & 4 deletions matrix_examples/sparse_spd/1024

Large diffs are not rendered by default.

8 changes: 4 additions & 4 deletions matrix_examples/sparse_spd/128

Large diffs are not rendered by default.

8 changes: 4 additions & 4 deletions matrix_examples/sparse_spd/16
@@ -1,5 +1,5 @@
16 16
224
10.17551649397471 -1.3675079273752777 -2.4716650624459584 -2.305217265544507 0.16159644910398754 -8.70317679867671 -6.172623557070354 -1.3248263809824643 -0.27069396638009213 -6.785984990744129 -7.798585316027067 -5.698821113567254 -4.99705956195648 0.6772591176017326 10.754688954917393 2.248750072756226 -2.7471602700195588 -7.869189226216772 -6.797065063338951 2.0556729319416323 -1.641730298434395 -1.5727269099910637 -6.529638827001246 1.077907503839971 -0.8038564190895209 0.8747161273005309 -8.37700171964974 -1.3675079273752777 2.248750072756226 26.425667015278506 1.9436388870150867 -7.695565883648802 -0.7088522990161666 1.014043784378096 6.318045815104874 5.552758205253476 -5.727534835050517 6.573972632088108 3.16859266797183 5.324944947497769 9.12269821576599 9.358550754890903 5.603970719496772 -2.4716650624459584 1.9436388870150867 38.979189188586396 0.7390006939089367 6.615695117451123 9.327772543967152 -2.4909139492408645 -0.38490252829165406 -5.541175358254228 13.533907305556728 1.7605178333327332 1.7603490426248003 12.637923718175081 1.0873376737823204 10.790652105957626 -2.305217265544507 -2.7471602700195588 -7.695565883648802 0.7390006939089367 10.531402664136449 -5.476630017405029 -3.6943995709398547 -8.3548427755908 -5.036852446674742 -2.1259720412257317 -7.369770260824481 -1.7980480456507666 -7.869189226216772 -0.7088522990161666 6.615695117451123 25.716807501391187 -1.0344995676793278 2.39816772341163 9.69110804273093 4.022560947252365 -5.8872075410569025 -1.8410134876924278 11.420996364327332 0.16159644910398754 -6.797065063338951 1.014043784378096 9.327772543967152 -1.0344995676793278 22.153494581912465 -0.6750420080845754 -1.2197556708083583 1.7007693149332597 6.826511162418436 1.4474332194895934 6.277390519914897 -7.207759375530426 6.685742189830751 -8.70317679867671 2.0556729319416323 6.318045815104874 -2.4909139492408645 -5.476630017405029 23.173909914569933 1.9639152417774441 7.649746449708185 11.644917114418687 9.72332420333828 3.441229220242059 1.0800275987237185 -6.172623557070354 -1.641730298434395 5.552758205253476 -0.38490252829165406 -3.6943995709398547 2.39816772341163 -0.6750420080845754 1.9639152417774441 22.595677055406508 3.3645628487322004 9.169734015671132 1.5515172516999098 13.970819533608307 10.070712905154743 2.5529256655943993 -1.3248263809824643 -5.727534835050517 -5.541175358254228 -1.2197556708083583 10.0 -8.075114798323385 -5.786003115974163 -7.061887885236889 -7.665162794731241 -5.11205941641569 -0.27069396638009213 -1.5727269099910637 6.573972632088108 13.533907305556728 -8.3548427755908 9.69110804273093 1.7007693149332597 7.649746449708185 3.3645628487322004 -8.075114798323385 41.537434770457736 15.344753577201908 9.28346218889487 13.199257239209661 6.439562073806472 7.961006347603333 -6.785984990744129 -6.529638827001246 3.16859266797183 1.7605178333327332 -5.036852446674742 4.022560947252365 6.826511162418436 11.644917114418687 9.169734015671132 -5.786003115974163 15.344753577201908 30.918189690219265 7.966140704427236 18.18484072078038 11.134195322851507 3.0219579022236265 -7.798585316027067 1.077907503839971 5.324944947497769 1.7603490426248003 -2.1259720412257317 -5.8872075410569025 1.4474332194895934 9.72332420333828 1.5515172516999098 9.28346218889487 7.966140704427236 33.60803896185102 10.447042285320729 5.9382866134431636 -6.18641978164441 -5.698821113567254 -0.8038564190895209 9.12269821576599 12.637923718175081 -7.369770260824481 -1.8410134876924278 6.277390519914897 3.441229220242059 13.970819533608307 -7.061887885236889 
13.199257239209661 18.18484072078038 10.447042285320729 40.82079344526299 14.90660280659408 8.744432688334507 -4.99705956195648 0.8747161273005309 9.358550754890903 1.0873376737823204 -1.7980480456507666 -7.207759375530426 1.0800275987237185 10.070712905154743 -7.665162794731241 6.439562073806472 11.134195322851507 5.9382866134431636 14.90660280659408 24.77556621961301 0.6195039478794018 0.6772591176017326 -8.37700171964974 5.603970719496772 10.790652105957626 11.420996364327332 6.685742189830751 2.5529256655943993 -5.11205941641569 7.961006347603333 3.0219579022236265 -6.18641978164441 8.744432688334507 0.6195039478794018 27.847284896098785
0 2 3 4 6 7 8 9 10 11 12 13 14 15 1 2 4 5 6 7 8 10 11 12 13 14 15 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 0 2 3 4 5 6 7 8 9 10 11 12 13 14 15 0 1 2 3 4 7 8 10 11 12 13 14 1 2 3 5 6 8 10 11 12 13 15 0 1 2 3 5 6 8 9 10 11 12 13 14 15 0 1 2 3 4 7 8 10 11 12 13 14 0 1 2 3 4 5 6 7 8 10 11 12 13 14 15 0 2 3 6 9 10 11 13 14 15 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 0 1 2 3 4 5 6 7 8 10 11 12 13 14 15 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 0 1 2 3 4 6 7 8 9 10 11 12 13 14 15 0 1 2 3 5 6 8 9 10 11 12 13 14 15
0 14 27 43 58 70 81 95 107 122 132 148 164 179 195 210 224
200
0.18257233111400206 -0.002288101349616914 -0.05772346722210179 -0.038303067571804086 0.015530544667795335 0.0773255790447499 -0.004482321635508685 -0.07018001455727367 -0.02484318820961451 -0.03159311985781518 0.06448600719335278 0.042198967211588854 -0.002288101349616914 0.20441490879667598 0.013029241633982364 0.046758811201943855 0.05734234610146173 0.02468530017369837 -0.03316045665318156 -0.08695472868065848 -0.035091027494232145 0.013085366869618404 0.0766246773816154 0.09274561905346149 -0.05772346722210179 0.1087404523685832 -0.013418568697338797 0.00872155740448225 -0.027165388978591522 -0.04691507295261473 0.0046776382481710285 -0.02542013363674135 0.018089868982328724 -0.004373074229946257 -0.04337261934725073 -0.015095099958215886 -0.038303067571804086 0.013029241633982364 -0.013418568697338797 0.21656495100272763 0.010320609716987832 0.01227082996274501 0.07009050949060423 0.044802142431218564 0.03635708204607762 -0.014592744868318037 -0.056535404128267186 0.015530544667795335 0.046758811201943855 0.00872155740448225 0.010320609716987832 0.29673391034496943 -0.06377793726159367 0.13426525123236774 0.02465477211934236 -0.07471287861837471 0.026827873569954586 -0.008180882418728346 0.06932808185175013 0.029091908283632395 0.09585124481531987 0.018964666606200567 -0.06377793726159367 0.1 -0.02109383727069286 -0.028939972679506484 -0.011012769483027283 -0.05590037262547085 -0.07339233184920055 -0.039904454811838574 0.0773255790447499 0.05734234610146173 -0.027165388978591522 0.01227082996274501 0.13426525123236774 -0.02109383727069286 0.4638377815979996 0.03144056737270592 -0.07156272230956642 -0.011402147416844555 -0.06127375945400442 0.007327621130776091 0.11003735438431016 0.08330984693348249 0.09141397938846779 0.07284717038375231 -0.004482321635508685 0.02468530017369837 -0.04691507295261473 0.07009050949060423 0.02465477211934236 0.03144056737270592 0.16722683808392247 -0.01802021958745317 0.017410911794937957 -0.060246101862599266 0.06412807935694023 -0.02748824713966922 0.0046776382481710285 -0.07471287861837471 -0.07156272230956642 0.10338608304582973 -0.06542743670461652 -0.018401312577720434 0.013095026894457837 0.005443844787894925 0.016041121613620517 -0.019007918832056714 -0.07018001455727367 -0.03316045665318156 0.026827873569954586 -0.028939972679506484 -0.011402147416844555 -0.06542743670461652 0.15118271492427182 0.0031870924796451234 -0.061801459125462324 -0.04043455525969395 -0.007541747879166386 -0.03614487499551872 -0.02542013363674135 -0.06127375945400442 -0.018401312577720434 0.1 -0.07116354792164536 -0.029584002580805638 -0.08717379016235212 -0.08695472868065848 0.018089868982328724 -0.008180882418728346 -0.011012769483027283 0.007327621130776091 -0.01802021958745317 0.013095026894457837 0.0031870924796451234 -0.07116354792164536 0.15185531644482617 0.007058080820972815 -0.0049523580292037675 -0.0070258030484215755 -0.07933446254788898 -0.02484318820961451 -0.035091027494232145 -0.004373074229946257 0.044802142431218564 0.06932808185175013 0.11003735438431016 0.017410911794937957 0.005443844787894925 -0.061801459125462324 -0.029584002580805638 0.007058080820972815 0.3192799707529647 0.12164113629652021 0.051140512809421806 0.029596840818351546 -0.03159311985781518 0.013085366869618404 0.03635708204607762 0.029091908283632395 -0.05590037262547085 0.08330984693348249 -0.060246101862599266 -0.04043455525969395 -0.0049523580292037675 0.12164113629652021 0.2918002597884365 0.06588998558181029 0.06564844956676819 0.06448600719335278 0.0766246773816154 
-0.04337261934725073 -0.014592744868318037 0.09585124481531987 -0.07339233184920055 0.09141397938846779 0.06412807935694023 0.016041121613620517 -0.007541747879166386 -0.08717379016235212 -0.0070258030484215755 0.051140512809421806 0.06588998558181029 0.41147656775940933 0.03705069710814989 0.042198967211588854 0.09274561905346149 -0.015095099958215886 -0.056535404128267186 0.018964666606200567 -0.039904454811838574 0.07284717038375231 -0.02748824713966922 -0.019007918832056714 -0.03614487499551872 -0.07933446254788898 0.029596840818351546 0.06564844956676819 0.03705069710814989 0.22579789683086882
0 1 2 3 4 6 7 9 12 13 14 15 0 1 3 4 6 7 9 11 12 13 14 15 0 2 3 4 6 7 8 10 11 12 14 15 0 1 2 3 4 6 7 12 13 14 15 0 1 2 3 4 5 6 7 8 9 11 12 13 14 15 4 5 6 9 11 13 14 15 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 0 1 2 3 4 6 7 11 12 13 14 15 2 4 6 8 9 10 11 12 14 15 0 1 4 5 6 8 9 11 12 13 14 15 2 6 8 10 11 12 14 1 2 4 5 6 7 8 9 10 11 12 13 14 15 0 1 2 3 4 6 7 8 9 10 11 12 13 14 15 0 1 3 4 5 6 7 9 11 12 13 14 15 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 0 1 2 3 4 5 6 7 8 9 11 12 13 14 15
0 12 24 36 47 62 70 86 98 108 120 127 141 156 169 185 200
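
Note: these sparse_spd files appear to store CSR directly, matching the members in sparse_matrix.h: a "rows cols" line, the nonzero count, the values (data_), the column indices (j_a_), and rows + 1 row pointers (i_a_, ending in the nonzero count). A hedged standalone reader for that layout (the project itself streams into SparseMatrix with operator>>):

#include <cstdint>
#include <istream>
#include <vector>

bool ReadCsr(std::istream& in, int64_t& rows, int64_t& cols,
             std::vector<double>& data, std::vector<int64_t>& j_a, std::vector<int64_t>& i_a) {
    int64_t nnz;
    if (!(in >> rows >> cols >> nnz)) {
        return false;
    }
    data.resize(nnz);
    j_a.resize(nnz);
    i_a.resize(rows + 1);
    for (auto& v : data) in >> v;  // nonzero values, row by row
    for (auto& c : j_a) in >> c;   // column index of each value
    for (auto& p : i_a) in >> p;   // row r occupies [i_a[r], i_a[r+1])
    return static_cast<bool>(in);
}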
8 changes: 4 additions & 4 deletions matrix_examples/sparse_spd/16384

Large diffs are not rendered by default.

8 changes: 4 additions & 4 deletions matrix_examples/sparse_spd/2048

Large diffs are not rendered by default.

8 changes: 4 additions & 4 deletions matrix_examples/sparse_spd/256

Large diffs are not rendered by default.
