diff --git a/.JuliaFormatter.toml b/.JuliaFormatter.toml
index 08f664c..4c49a86 100644
--- a/.JuliaFormatter.toml
+++ b/.JuliaFormatter.toml
@@ -1,2 +1,3 @@
+# See https://domluna.github.io/JuliaFormatter.jl/stable/ for a list of options
style = "blue"
indent = 2
diff --git a/.github/ISSUE_TEMPLATE/BUG_REPORT.md b/.github/ISSUE_TEMPLATE/BUG_REPORT.md
new file mode 100644
index 0000000..a9188f7
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/BUG_REPORT.md
@@ -0,0 +1,62 @@
+---
+name: BlockSparseArrays.jl bug report
+about: Create a bug report to help us improve BlockSparseArrays.jl
+title: "[BUG] YOUR SHORT DESCRIPTION OF THE BUG HERE"
+labels: ["bug"]
+assignees: ''
+
+---
+
+**Description of bug**
+
+Please give a brief description of the bug or unexpected behavior here.
+
+**Minimal code demonstrating the bug or unexpected behavior**
+
+If applicable, provide a minimal code example that can be run to demonstrate the bug or unexpected behavior.
+
+If you are unable to construct a minimal code example that demonstrates the bug or unexpected behavior, provide detailed steps for reproducing the behavior you are seeing.
+
+Minimal runnable code
+
+```julia
+[YOUR MINIMAL RUNNABLE CODE HERE]
+```
+
+
+
+
+**Expected output or behavior**
+
+Describe what you expected to happen.
+
+If you provided a minimal code example that can be run to demonstrate the bug or unexpected behavior, describe what you expected the output to be.
+
+
+**Actual output or behavior**
+
+Describe what actually happened.
+
+If you provided a minimal code example that demonstrates the bug or unexpected behavior, provide the output you get from that code. If the code leads to an error or warning, include the full error or warning below.
+
+Output of minimal runnable code
+
+```julia
+[OUTPUT OF YOUR MINIMAL RUNNABLE CODE HERE]
+```
+
+
+
+
+**Version information**
+
+ - Output from `versioninfo()`:
+```julia
+julia> versioninfo()
+[YOUR OUTPUT HERE]
+```
+ - Output from `using Pkg; Pkg.status("BlockSparseArrays")`:
+```julia
+julia> using Pkg; Pkg.status("BlockSparseArrays")
+[YOUR OUTPUT HERE]
+```
diff --git a/.github/ISSUE_TEMPLATE/FEATURE_REQUEST.md b/.github/ISSUE_TEMPLATE/FEATURE_REQUEST.md
new file mode 100644
index 0000000..88b5256
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/FEATURE_REQUEST.md
@@ -0,0 +1,24 @@
+---
+name: BlockSparseArrays.jl feature request
+about: Suggest an idea for BlockSparseArrays.jl
+title: "[ENHANCEMENT] YOUR SHORT DESCRIPTION OF THE FEATURE REQUEST HERE"
+labels: ["enhancement"]
+assignees: ''
+
+---
+
+**Is your feature request related to a problem? Please describe.**
+
+A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
+
+**Describe the solution you'd like**
+
+A clear and concise description of what you want to happen.
+
+**Describe alternatives you've considered**
+
+A clear and concise description of any alternative solutions or features you've considered.
+
+**Additional context**
+
+Add any other context or screenshots about the feature request here.
diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md
new file mode 100644
index 0000000..1a86427
--- /dev/null
+++ b/.github/PULL_REQUEST_TEMPLATE.md
@@ -0,0 +1,42 @@
+# Description
+
+Please include a summary of the change and, if applicable, which issue it fixes. Please also include relevant motivation and context, and list any dependencies required for this change.
+
+Fixes #(issue)
+
+If practical and applicable, please include a minimal demonstration of the previous behavior and new behavior below.
+
+Minimal demonstration of previous behavior
+
+```julia
+[YOUR MINIMAL DEMONSTRATION OF PREVIOUS BEHAVIOR]
+```
+
+
+
+Minimal demonstration of new behavior
+
+```julia
+[YOUR MINIMAL DEMONSTRATION OF NEW BEHAVIOR]
+```
+
+
+
+# How Has This Been Tested?
+
+Please add tests to a file in the `test` directory that verify your changes.
+
+Please give a summary of the tests that you added to verify your changes.
+
+- [ ] Test A
+- [ ] Test B
+
+# Checklist:
+
+- [ ] My code follows the style guidelines of this project. Please run `using JuliaFormatter; format(".")` in the base directory of the repository (`~/.julia/dev/BlockSparseArrays`) to format your code according to our style guidelines.
+- [ ] I have performed a self-review of my own code.
+- [ ] I have commented my code, particularly in hard-to-understand areas.
+- [ ] I have added tests that verify the behavior of the changes I made.
+- [ ] I have made corresponding changes to the documentation.
+- [ ] My changes generate no new warnings.
+- [ ] Any dependent changes have been merged and published in downstream modules.
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
new file mode 100644
index 0000000..700707c
--- /dev/null
+++ b/.github/dependabot.yml
@@ -0,0 +1,7 @@
+# https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates
+version: 2
+updates:
+ - package-ecosystem: "github-actions"
+ directory: "/" # Location of package manifests
+ schedule:
+ interval: "weekly"
diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml
new file mode 100644
index 0000000..9071f89
--- /dev/null
+++ b/.github/workflows/CI.yml
@@ -0,0 +1,79 @@
+name: CI
+on:
+ push:
+ branches:
+ - main
+ tags: ['*']
+ pull_request:
+ workflow_dispatch:
+concurrency:
+ # Skip intermediate builds: always.
+ # Cancel intermediate builds: only if it is a pull request build.
+ group: ${{ github.workflow }}-${{ github.ref }}
+ cancel-in-progress: ${{ startsWith(github.ref, 'refs/pull/') }}
+jobs:
+ test:
+ name: Julia ${{ matrix.version }} - ${{ matrix.os }} - ${{ matrix.arch }}
+ runs-on: ${{ matrix.os }}
+ timeout-minutes: 60
+ permissions: # needed to allow julia-actions/cache to proactively delete old caches that it has created
+ actions: write
+ contents: read
+ strategy:
+ fail-fast: false
+ matrix:
+ version:
+ # - 'lts' # TODO: Reenable once dependencies are registered.
+ - '1'
+ os:
+ - ubuntu-latest
+ - macOS-latest
+ - windows-latest
+ arch:
+ - x64
+ steps:
+ - uses: actions/checkout@v4
+ - uses: julia-actions/setup-julia@v2
+ with:
+ version: ${{ matrix.version }}
+ arch: ${{ matrix.arch }}
+ - uses: julia-actions/cache@v2
+ - uses: julia-actions/julia-buildpkg@v1
+ - uses: julia-actions/julia-runtest@v1
+ - uses: julia-actions/julia-processcoverage@v1
+ - uses: codecov/codecov-action@v5
+ with:
+ files: lcov.info
+ token: ${{ secrets.CODECOV_TOKEN }}
+ fail_ci_if_error: false
+ docs:
+ name: Documentation
+ runs-on: ubuntu-latest
+ permissions:
+ actions: write # needed to allow julia-actions/cache to proactively delete old caches that it has created
+ contents: write
+ statuses: write
+ steps:
+ - uses: actions/checkout@v4
+ - uses: julia-actions/setup-julia@v2
+ with:
+ version: '1'
+ - uses: julia-actions/cache@v2
+ - name: Configure doc environment
+ shell: julia --project=docs --color=yes {0}
+ run: |
+ using Pkg
+ Pkg.develop(PackageSpec(path=pwd()))
+ Pkg.instantiate()
+ - uses: julia-actions/julia-buildpkg@v1
+ - uses: julia-actions/julia-docdeploy@v1
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ DOCUMENTER_KEY: ${{ secrets.DOCUMENTER_KEY }}
+ - name: Run doctests
+ shell: julia --project=docs --color=yes {0}
+ run: |
+ using Documenter: DocMeta, doctest
+ using BlockSparseArrays
+ DocMeta.setdocmeta!(BlockSparseArrays, :DocTestSetup, :(using BlockSparseArrays); recursive=true)
+ doctest(BlockSparseArrays)
diff --git a/.github/workflows/CompatHelper.yml b/.github/workflows/CompatHelper.yml
new file mode 100644
index 0000000..cba9134
--- /dev/null
+++ b/.github/workflows/CompatHelper.yml
@@ -0,0 +1,16 @@
+name: CompatHelper
+on:
+ schedule:
+ - cron: 0 0 * * *
+ workflow_dispatch:
+jobs:
+ CompatHelper:
+ runs-on: ubuntu-latest
+ steps:
+ - name: Pkg.add("CompatHelper")
+ run: julia -e 'using Pkg; Pkg.add("CompatHelper")'
+ - name: CompatHelper.main()
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ COMPATHELPER_PRIV: ${{ secrets.DOCUMENTER_KEY }}
+ run: julia -e 'using CompatHelper; CompatHelper.main()'
diff --git a/.github/workflows/FormatCheck.yml b/.github/workflows/FormatCheck.yml
new file mode 100644
index 0000000..bb6d933
--- /dev/null
+++ b/.github/workflows/FormatCheck.yml
@@ -0,0 +1,34 @@
+name: Format check
+on:
+ push:
+ branches: [main]
+ tags: [v*]
+ pull_request:
+
+jobs:
+ format:
+ name: "Format Check"
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ - uses: julia-actions/setup-julia@v2
+ with:
+ version: 1
+ - name: Install JuliaFormatter and format
+ run: |
+ julia -e 'using Pkg; Pkg.add(PackageSpec(name="JuliaFormatter"))'
+ julia -e 'using JuliaFormatter; format(".", verbose=true)'
+ - name: Check format
+ run: |
+ julia -e '
+ out = Cmd(`git diff --name-only`) |> read |> String
+ if out == ""
+ exit(0)
+ else
+ @error "The following files have not been formatted:"
+ write(stdout, out)
+ out_diff = Cmd(`git diff`) |> read |> String
+ @error "Diff:"
+ write(stdout, out_diff)
+ exit(1)
+ end'
diff --git a/.github/workflows/LiterateCheck.yml b/.github/workflows/LiterateCheck.yml
new file mode 100644
index 0000000..05566d1
--- /dev/null
+++ b/.github/workflows/LiterateCheck.yml
@@ -0,0 +1,46 @@
+name: Literate check
+on:
+ push:
+ branches: [main]
+ tags: [v*]
+ pull_request:
+
+jobs:
+ literate:
+ name: "Literate Check"
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ - uses: julia-actions/setup-julia@v2
+ with:
+ version: 1
+ - name: Install Literate and generate docs
+ run: |
+ julia -e '
+ using Pkg
+ # TODO: Delete these once they are registered.
+ Pkg.add(url="https://github.com/ITensor/TypeParameterAccessors.jl")
+ Pkg.add(url="https://github.com/ITensor/BroadcastMapConversion.jl")
+ Pkg.add(url="https://github.com/ITensor/NestedPermutedDimsArrays.jl")
+ Pkg.add(url="https://github.com/ITensor/LabelledNumbers.jl")
+ Pkg.add(url="https://github.com/ITensor/GradedUnitRanges.jl")
+ Pkg.add(url="https://github.com/ITensor/SparseArraysBase.jl")
+ Pkg.add(url="https://github.com/ITensor/TensorAlgebra.jl")
+ Pkg.develop(PackageSpec(path=pwd()))
+ Pkg.instantiate()
+ Pkg.add(PackageSpec(name="Literate"))'
+ julia -e 'include("docs/make_readme.jl")'
+ - name: Check if docs need to be updated
+ run: |
+ julia -e '
+ out = Cmd(`git diff --name-only`) |> read |> String
+ if out == ""
+ exit(0)
+ else
+ @error "The docs are outdated, rerun Literate to regenerate them."
+ write(stdout, out)
+ out_diff = Cmd(`git diff`) |> read |> String
+ @error "Diff:"
+ write(stdout, out_diff)
+ exit(1)
+ end'
diff --git a/.github/workflows/Register.yml b/.github/workflows/Register.yml
new file mode 100644
index 0000000..5b7cd3b
--- /dev/null
+++ b/.github/workflows/Register.yml
@@ -0,0 +1,16 @@
+name: Register Package
+on:
+ workflow_dispatch:
+ inputs:
+ version:
+ description: Version to register or component to bump
+ required: true
+jobs:
+ register:
+ runs-on: ubuntu-latest
+ permissions:
+ contents: write
+ steps:
+ - uses: julia-actions/RegisterAction@latest
+ with:
+ token: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.github/workflows/TagBot.yml b/.github/workflows/TagBot.yml
new file mode 100644
index 0000000..0cd3114
--- /dev/null
+++ b/.github/workflows/TagBot.yml
@@ -0,0 +1,31 @@
+name: TagBot
+on:
+ issue_comment:
+ types:
+ - created
+ workflow_dispatch:
+ inputs:
+ lookback:
+ default: "3"
+permissions:
+ actions: read
+ checks: read
+ contents: write
+ deployments: read
+ issues: read
+ discussions: read
+ packages: read
+ pages: read
+ pull-requests: read
+ repository-projects: read
+ security-events: read
+ statuses: read
+jobs:
+ TagBot:
+ if: github.event_name == 'workflow_dispatch' || github.actor == 'JuliaTagBot'
+ runs-on: ubuntu-latest
+ steps:
+ - uses: JuliaRegistries/TagBot@v1
+ with:
+ token: ${{ secrets.GITHUB_TOKEN }}
+ ssh: ${{ secrets.DOCUMENTER_KEY }}
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..10593a9
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,14 @@
+*.jl.*.cov
+*.jl.cov
+*.jl.mem
+*.o
+*.swp
+.DS_Store
+.benchmarkci
+.tmp
+.vscode/
+Manifest.toml
+benchmark/*.json
+docs/Manifest.toml
+docs/build/
+docs/src/index.md
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
new file mode 100644
index 0000000..bff1fb7
--- /dev/null
+++ b/.pre-commit-config.yaml
@@ -0,0 +1,19 @@
+repos:
+- repo: https://github.com/pre-commit/pre-commit-hooks
+ rev: v4.5.0
+ hooks:
+ - id: check-merge-conflict
+ - id: check-toml
+ - id: check-yaml
+ - id: end-of-file-fixer
+ exclude_types: [markdown] # incompatible with Literate.jl
+- repo: https://github.com/qiaojunfeng/pre-commit-julia-format
+ rev: v0.2.0
+ hooks:
+ - id: julia-format
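+
+# To run these hooks locally (assuming `pre-commit` is installed, for example
+# via `pip install pre-commit`):
+#
+#   pre-commit install           # run the hooks automatically on `git commit`
+#   pre-commit run --all-files   # or run them once over the whole repository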
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..7f5c8c6
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2024 ITensor developers
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/Project.toml b/Project.toml
index 4b6396f..0c761de 100644
--- a/Project.toml
+++ b/Project.toml
@@ -1,3 +1,50 @@
+name = "BlockSparseArrays"
+uuid = "2c9a651f-6452-4ace-a6ac-809f4280fbb4"
+authors = ["ITensor developers and contributors"]
+version = "0.1.0"
+
[deps]
+Adapt = "79e6a3ab-5dfb-504d-930d-738a2a938a0e"
+ArrayLayouts = "4c555306-a7a7-4459-81d9-ec55ddd5c99a"
BlockArrays = "8e7c35d0-a365-5155-bbbb-fb81a777f24e"
+BroadcastMapConversion = "4a4adec5-520f-4750-bb37-d5e66b4ddeb2"
+Dictionaries = "85a47980-9c8c-11e8-2b9f-f7ca1fa99fb4"
+GPUArraysCore = "46192b85-c4d5-4398-a991-12ede77f4527"
+GradedUnitRanges = "e2de450a-8a67-46c7-b59c-01d5a3d041c5"
+LabelledNumbers = "f856a3a6-4152-4ec4-b2a7-02c1a55d7993"
LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
+MacroTools = "1914dd2f-81c6-5fcd-8719-6d5c9610ff09"
+NestedPermutedDimsArrays = "2c2a8ec4-3cfc-4276-aa3e-1307b4294e58"
+SparseArraysBase = "0d5efcca-f356-4864-8770-e1ed8d78f208"
+SplitApplyCombine = "03a91e81-4c3e-53e1-a0a4-9c0c8f19dd66"
+TensorAlgebra = "68bd88dc-f39d-4e12-b2ca-f046b68fcc6a"
+TypeParameterAccessors = "7e5a90cf-f82e-492e-a09b-e3e26432c138"
+
+[sources]
+BroadcastMapConversion = {url = "https://github.com/ITensor/BroadcastMapConversion.jl"}
+GradedUnitRanges = {url = "https://github.com/ITensor/GradedUnitRanges.jl"}
+LabelledNumbers = {url = "https://github.com/ITensor/LabelledNumbers.jl"}
+NestedPermutedDimsArrays = {url = "https://github.com/ITensor/NestedPermutedDimsArrays.jl"}
+SparseArraysBase = {url = "https://github.com/ITensor/SparseArraysBase.jl"}
+TensorAlgebra = {url = "https://github.com/ITensor/TensorAlgebra.jl"}
+TypeParameterAccessors = {url = "https://github.com/ITensor/TypeParameterAccessors.jl"}
+
+[compat]
+Adapt = "4.1.1"
+Aqua = "0.8.9"
+ArrayLayouts = "1.10.4"
+BlockArrays = "1.2.0"
+Dictionaries = "0.4.3"
+GPUArraysCore = "0.1.0"
+LinearAlgebra = "1.10"
+MacroTools = "0.5.13"
+SplitApplyCombine = "1.2.3"
+Test = "1.10"
+julia = "1.10"
+
+[extras]
+Aqua = "4c88cf16-eb10-579e-8560-4a9242c79595"
+Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
+
+[targets]
+test = ["Aqua", "Test"]
diff --git a/README.md b/README.md
index dd74c97..4a1efe2 100644
--- a/README.md
+++ b/README.md
@@ -1,15 +1,28 @@
# BlockSparseArrays.jl
-A Julia `BlockSparseArray` type based on the `BlockArrays.jl` interface.
+[![Stable](https://img.shields.io/badge/docs-stable-blue.svg)](https://ITensor.github.io/BlockSparseArrays.jl/stable/)
+[![Dev](https://img.shields.io/badge/docs-dev-blue.svg)](https://ITensor.github.io/BlockSparseArrays.jl/dev/)
+[![Build Status](https://github.com/ITensor/BlockSparseArrays.jl/actions/workflows/CI.yml/badge.svg?branch=main)](https://github.com/ITensor/BlockSparseArrays.jl/actions/workflows/CI.yml?query=branch%3Amain)
+[![Coverage](https://codecov.io/gh/ITensor/BlockSparseArrays.jl/branch/main/graph/badge.svg)](https://codecov.io/gh/ITensor/BlockSparseArrays.jl)
+[![Code Style: Blue](https://img.shields.io/badge/code%20style-blue-4495d1.svg)](https://github.com/invenia/BlueStyle)
+[![Aqua](https://raw.githubusercontent.com/JuliaTesting/Aqua.jl/master/badge.svg)](https://github.com/JuliaTesting/Aqua.jl)
-It wraps an elementwise `SparseArray` type that uses a dictionary-of-keys
-to store non-zero values, specifically a `Dictionary` from `Dictionaries.jl`.
-`BlockArrays` reinterprets the `SparseArray` as a blocked data structure.
+A block sparse array type in Julia based on the [`BlockArrays.jl`](https://github.com/JuliaArrays/BlockArrays.jl) interface.
+
+## Installation instructions
+
+```julia
+julia> using Pkg: Pkg
+
+julia> Pkg.add("https://github.com/ITensor/BlockSparseArrays.jl")
+```
+
+## Examples
````julia
-using NDTensors.BlockSparseArrays
-using BlockArrays: BlockArrays, blockedrange
-using Test
+using BlockArrays: BlockArrays, BlockedVector, Block, blockedrange
+using BlockSparseArrays: BlockSparseArray, block_stored_length
+using Test: @test, @test_broken
function main()
# Block dimensions
@@ -19,43 +32,60 @@ function main()
i_axes = (blockedrange(i1), blockedrange(i2))
function block_size(axes, block)
- return length.(getindex.(axes, BlockArrays.Block.(block.n)))
+ return length.(getindex.(axes, Block.(block.n)))
end
# Data
- nz_blocks = BlockArrays.Block.([(1, 1), (2, 2)])
+ nz_blocks = Block.([(1, 1), (2, 2)])
nz_block_sizes = [block_size(i_axes, nz_block) for nz_block in nz_blocks]
nz_block_lengths = prod.(nz_block_sizes)
+ # Blocks with contiguous underlying data
+ d_data = BlockedVector(randn(sum(nz_block_lengths)), nz_block_lengths)
+ d_blocks = [
+ reshape(@view(d_data[Block(i)]), block_size(i_axes, nz_blocks[i])) for
+ i in 1:length(nz_blocks)
+ ]
+ b = BlockSparseArray(nz_blocks, d_blocks, i_axes)
+
+ @test block_stored_length(b) == 2
+
# Blocks with discontiguous underlying data
d_blocks = randn.(nz_block_sizes)
+ b = BlockSparseArray(nz_blocks, d_blocks, i_axes)
- # Blocks with contiguous underlying data
- # d_data = PseudoBlockVector(randn(sum(nz_block_lengths)), nz_block_lengths)
- # d_blocks = [reshape(@view(d_data[Block(i)]), block_size(i_axes, nz_blocks[i])) for i in 1:length(nz_blocks)]
-
- B = BlockSparseArray(nz_blocks, d_blocks, i_axes)
+ @test block_stored_length(b) == 2
# Access a block
- B[BlockArrays.Block(1, 1)]
+ @test b[Block(1, 1)] == d_blocks[1]
- # Access a non-zero block, returns a zero matrix
- B[BlockArrays.Block(1, 2)]
+ # Access a zero block, returns a zero matrix
+ @test b[Block(1, 2)] == zeros(2, 3)
# Set a zero block
- B[BlockArrays.Block(1, 2)] = randn(2, 3)
+ a₁₂ = randn(2, 3)
+ b[Block(1, 2)] = a₁₂
+ @test b[Block(1, 2)] == a₁₂
- # Matrix multiplication (not optimized for sparsity yet)
- @test B * B ≈ Array(B) * Array(B)
+ # Matrix multiplication
+ # TODO: Fix this, broken.
+ @test_broken b * b ≈ Array(b) * Array(b)
- permuted_B = permutedims(B, (2, 1))
- @test permuted_B isa BlockSparseArray
- @test permuted_B == permutedims(Array(B), (2, 1))
+ permuted_b = permutedims(b, (2, 1))
+ @test permuted_b isa BlockSparseArray
+ @test permuted_b == permutedims(Array(b), (2, 1))
- @test B + B ≈ Array(B) + Array(B)
- @test 2B ≈ 2Array(B)
+ @test b + b ≈ Array(b) + Array(b)
+ @test b + b isa BlockSparseArray
+ # TODO: Fix this, broken.
+ @test_broken block_stored_length(b + b) == 2
- @test reshape(B, ([4, 6, 6, 9],)) isa BlockSparseArray{<:Any,1}
+ scaled_b = 2b
+ @test scaled_b ≈ 2Array(b)
+ @test scaled_b isa BlockSparseArray
+
+ # TODO: Fix this, broken.
+ @test_broken reshape(b, ([4, 6, 6, 9],)) isa BlockSparseArray{<:Any,1}
return nothing
end
@@ -66,14 +96,14 @@ main()
# BlockSparseArrays.jl and BlockArrays.jl interface
````julia
-using NDTensors.BlockSparseArrays
-using BlockArrays: BlockArrays
+using BlockArrays: BlockArrays, Block
+using BlockSparseArrays: BlockSparseArray
i1 = [2, 3]
i2 = [2, 3]
B = BlockSparseArray{Float64}(i1, i2)
-B[BlockArrays.Block(1, 1)] = randn(2, 2)
-B[BlockArrays.Block(2, 2)] = randn(3, 3)
+B[Block(1, 1)] = randn(2, 2)
+B[Block(2, 2)] = randn(3, 3)
# Minimal interface
@@ -81,7 +111,7 @@ B[BlockArrays.Block(2, 2)] = randn(3, 3)
@show collect.(BlockArrays.blockaxes(axes(B, 1)))
# Index range of a block
-@show axes(B, 1)[BlockArrays.Block(1)]
+@show axes(B, 1)[Block(1)]
# Last index of each block
@show BlockArrays.blocklasts(axes(B, 1))
@@ -90,12 +120,12 @@ B[BlockArrays.Block(2, 2)] = randn(3, 3)
@show BlockArrays.findblock(axes(B, 1), 3)
# Retrieve a block
-@show B[BlockArrays.Block(1, 1)]
-@show BlockArrays.viewblock(B, BlockArrays.Block(1, 1))
+@show B[Block(1, 1)]
+@show BlockArrays.viewblock(B, Block(1, 1))
# Check block bounds
@show BlockArrays.blockcheckbounds(B, 2, 2)
-@show BlockArrays.blockcheckbounds(B, BlockArrays.Block(2, 2))
+@show BlockArrays.blockcheckbounds(B, Block(2, 2))
# Derived interface
@@ -106,86 +136,16 @@ B[BlockArrays.Block(2, 2)] = randn(3, 3)
@show sum.(BlockArrays.eachblock(B))
# Reshape into 1-d
-@show BlockArrays.blockvec(B)[BlockArrays.Block(1)]
+# TODO: Fix this, broken.
+# @show BlockArrays.blockvec(B)[Block(1)]
# Array-of-array view
-@show BlockArrays.blocks(B)[1, 1] == B[BlockArrays.Block(1, 1)]
+@show BlockArrays.blocks(B)[1, 1] == B[Block(1, 1)]
# Access an index within a block
-@show B[BlockArrays.Block(1, 1)[1, 1]] == B[1, 1]
+@show B[Block(1, 1)[1, 1]] == B[1, 1]
````
-# Proposals for interfaces based on `BlockArrays.jl`, `SparseArrays`, and `BlockSparseArrays.jl`
-
-```julia
-# BlockSparseArray interface
-
-# Define `eachblockindex`
-eachblockindex(B::BlockArrays.AbstractBlockArray) = Iterators.product(BlockArrays.blockaxes(B)...)
-
-eachblockindex(B::BlockArrays.AbstractBlockArray, b::Block) # indices in a block
-
-blocksize(B::BlockArrays.AbstractBlockArray, b::Block) # size of a block
-blocksize(axes, b::Block) # size of a block
-
-blocklength(B::BlockArrays.AbstractBlockArray, b::Block) # length of a block
-blocklength(axes, b::Block) # length of a block
-
-# Other functions
-BlockArrays.blocksize(B) # number of blocks in each dimension
-BlockArrays.blocksizes(B) # length of blocks in each dimension
-
-tuple_block(Block(2, 2)) == (Block(2), Block(2)) # Block.(b.n)
-blocksize(axes, b::Block) = map(axis -> length(axis[Block(b.n)]), axes)
-blocksize(B, Block(2, 2)) = size(B[Block(2, 2)]) # size of a specified block
-
-# SparseArrays interface
-
-findnz(S) # outputs nonzero keys and values (SparseArrayKit.nonzero_pairs)
-nonzeros(S) # vector of structural nonzeros (SparseArrayKit.nonzero_values)
-nnz(S) # number of nonzero values (SparseArrayKit.nonzero_length)
-rowvals(S) # row that each nonzero value in `nonzeros(S)` is in
-nzrange(S, c) # range of linear indices into `nonzeros(S)` for values in column `c`
-findall(!iszero, S) # CartesianIndices of numerical nonzeros
-issparse(S)
-sparse(A) # convert to sparse
-dropzeros!(S)
-droptol!(S, tol)
-
-# BlockSparseArrays.jl + SparseArrays
-
-blockfindnz(B) # outputs nonzero block indices/keys and block views
-blocknonzeros(B)
-blocknnz(S)
-blockfindall(!iszero, B)
-isblocksparse(B)
-blocksparse(A)
-blockdropzeros!(B)
-blockdroptol!(B, tol)
-
-# SparseArrayKit.jl interface
-
-nonzero_pairs(a) # SparseArrays.findnz
-nonzero_keys(a) # SparseArrays.?
-nonzero_values(a) # SparseArrays.nonzeros
-nonzero_length(a) # SparseArrays.nnz
-
-# BlockSparseArrays.jl + SparseArrayKit.jl interface
-
-block_nonzero_pairs
-block_nonzero_keys
-block_nonzero_values
-block_nonzero_length
-```
-
-You can generate this README with:
-```julia
-using Literate
-using NDTensors.BlockSparseArrays
-dir = joinpath(pkgdir(BlockSparseArrays), "src", "BlockSparseArrays")
-Literate.markdown(joinpath(dir, "examples", "README.jl"), dir; flavor=Literate.CommonMarkFlavor())
-```
-
---
*This page was generated using [Literate.jl](https://github.com/fredrikekre/Literate.jl).*
diff --git a/TODO.md b/TODO.md
new file mode 100644
index 0000000..6c4bf82
--- /dev/null
+++ b/TODO.md
@@ -0,0 +1,84 @@
+- Add Aqua tests.
+- Turn the package extensions into actual package extensions (see the sketch after this list):
+ - BlockSparseArraysAdaptExt
+ - BlockSparseArraysGradedUnitRangesExt
+ - BlockSparseArraysTensorAlgebraExt
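+
+As a sketch (assuming the extension modules keep their current names, and that
+`Adapt`, `GradedUnitRanges`, and `TensorAlgebra` move from `[deps]` to
+`[weakdeps]`), the corresponding `Project.toml` entries might look like:
+
+```toml
+[weakdeps]
+Adapt = "79e6a3ab-5dfb-504d-930d-738a2a938a0e"
+GradedUnitRanges = "e2de450a-8a67-46c7-b59c-01d5a3d041c5"
+TensorAlgebra = "68bd88dc-f39d-4e12-b2ca-f046b68fcc6a"
+
+[extensions]
+BlockSparseArraysAdaptExt = "Adapt"
+BlockSparseArraysGradedUnitRangesExt = "GradedUnitRanges"
+BlockSparseArraysTensorAlgebraExt = "TensorAlgebra"
+```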
+
+# Proposals for interfaces based on `BlockArrays.jl`, `SparseArrays`, and `BlockSparseArrays.jl`
+
+```julia
+# BlockSparseArray interface
+
+# Define `eachblockindex`
+eachblockindex(B::BlockArrays.AbstractBlockArray) = Iterators.product(BlockArrays.blockaxes(B)...)
+
+eachblockindex(B::BlockArrays.AbstractBlockArray, b::Block) # indices in a block
+
+blocksize(B::BlockArrays.AbstractBlockArray, b::Block) # size of a block
+blocksize(axes, b::Block) # size of a block
+
+blocklength(B::BlockArrays.AbstractBlockArray, b::Block) # length of a block
+blocklength(axes, b::Block) # length of a block
+
+# Other functions
+BlockArrays.blocksize(B) # number of blocks in each dimension
+BlockArrays.blocksizes(B) # length of blocks in each dimension
+
+tuple_block(Block(2, 2)) == (Block(2), Block(2)) # Block.(b.n)
+blocksize(axes, b::Block) = map(axis -> length(axis[Block(b.n)]), axes)
+blocksize(B, Block(2, 2)) = size(B[Block(2, 2)]) # size of a specified block
+
+# SparseArrays interface
+
+findnz(S) # outputs nonzero keys and values (SparseArrayKit.nonzero_pairs)
+nonzeros(S) # vector of structural nonzeros (SparseArrayKit.nonzero_values)
+nnz(S) # number of nonzero values (SparseArrayKit.nonzero_length)
+rowvals(S) # row that each nonzero value in `nonzeros(S)` is in
+nzrange(S, c) # range of linear indices into `nonzeros(S)` for values in column `c`
+findall(!iszero, S) # CartesianIndices of numerical nonzeros
+issparse(S)
+sparse(A) # convert to sparse
+dropzeros!(S)
+droptol!(S, tol)
+
+# BlockSparseArrays.jl + SparseArrays
+
+blockfindnz(B) # outputs nonzero block indices/keys and block views
+blocknonzeros(B)
+blocknnz(S)
+blockfindall(!iszero, B)
+isblocksparse(B)
+blocksparse(A)
+blockdropzeros!(B)
+blockdroptol!(B, tol)
+
+# SparseArrayKit.jl interface
+
+nonzero_pairs(a) # SparseArrays.findnz
+nonzero_keys(a) # SparseArrays.?
+nonzero_values(a) # SparseArrays.nonzeros
+nonzero_length(a) # SparseArrays.nnz
+
+# BlockSparseArrays.jl + SparseArrayKit.jl interface
+
+block_nonzero_pairs
+block_nonzero_keys
+block_nonzero_values
+block_nonzero_length
+```
diff --git a/benchmark/benchmarks.jl b/benchmark/benchmarks.jl
new file mode 100644
index 0000000..65c0acf
--- /dev/null
+++ b/benchmark/benchmarks.jl
@@ -0,0 +1,16 @@
+using BlockSparseArrays
+using BenchmarkTools
+
+SUITE = BenchmarkGroup()
+SUITE["rand"] = @benchmarkable rand(10)
+
+# Write your benchmarks here.
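+
+# As a sketch of a package-specific benchmark (assuming `BlockArrays` is
+# available in this environment and using the constructors shown in the
+# README), time writing a dense block into an empty `BlockSparseArray`:
+using BlockArrays: Block
+using BlockSparseArrays: BlockSparseArray
+SUITE["setblock"] = @benchmarkable setindex!(b, x, Block(1, 1)) setup = (
+  b = BlockSparseArray{Float64}([10, 10], [10, 10]); x = randn(10, 10)
+)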
diff --git a/docs/Project.toml b/docs/Project.toml
new file mode 100644
index 0000000..4a68229
--- /dev/null
+++ b/docs/Project.toml
@@ -0,0 +1,21 @@
+[deps]
+BlockArrays = "8e7c35d0-a365-5155-bbbb-fb81a777f24e"
+BlockSparseArrays = "2c9a651f-6452-4ace-a6ac-809f4280fbb4"
+BroadcastMapConversion = "4a4adec5-520f-4750-bb37-d5e66b4ddeb2"
+Documenter = "e30172f5-a6a5-5a46-863b-614d45cd2de4"
+GradedUnitRanges = "e2de450a-8a67-46c7-b59c-01d5a3d041c5"
+LabelledNumbers = "f856a3a6-4152-4ec4-b2a7-02c1a55d7993"
+Literate = "98b081ad-f1c9-55d3-8b20-4c87d4299306"
+NestedPermutedDimsArrays = "2c2a8ec4-3cfc-4276-aa3e-1307b4294e58"
+SparseArraysBase = "0d5efcca-f356-4864-8770-e1ed8d78f208"
+TensorAlgebra = "68bd88dc-f39d-4e12-b2ca-f046b68fcc6a"
+TypeParameterAccessors = "7e5a90cf-f82e-492e-a09b-e3e26432c138"
+
+[sources]
+BroadcastMapConversion = {url = "https://github.com/ITensor/BroadcastMapConversion.jl"}
+GradedUnitRanges = {url = "https://github.com/ITensor/GradedUnitRanges.jl"}
+LabelledNumbers = {url = "https://github.com/ITensor/LabelledNumbers.jl"}
+NestedPermutedDimsArrays = {url = "https://github.com/ITensor/NestedPermutedDimsArrays.jl"}
+SparseArraysBase = {url = "https://github.com/ITensor/SparseArraysBase.jl"}
+TensorAlgebra = {url = "https://github.com/ITensor/TensorAlgebra.jl"}
+TypeParameterAccessors = {url = "https://github.com/ITensor/TypeParameterAccessors.jl"}
diff --git a/docs/make.jl b/docs/make.jl
new file mode 100644
index 0000000..d24e6c3
--- /dev/null
+++ b/docs/make.jl
@@ -0,0 +1,24 @@
+using BlockSparseArrays: BlockSparseArrays
+using Documenter: Documenter, DocMeta, deploydocs, makedocs
+
+DocMeta.setdocmeta!(
+ BlockSparseArrays, :DocTestSetup, :(using BlockSparseArrays); recursive=true
+)
+
+include("make_index.jl")
+
+makedocs(;
+ modules=[BlockSparseArrays],
+ authors="ITensor developers and contributors",
+ sitename="BlockSparseArrays.jl",
+ format=Documenter.HTML(;
+ canonical="https://ITensor.github.io/BlockSparseArrays.jl",
+ edit_link="main",
+ assets=String[],
+ ),
+ pages=["Home" => "index.md"],
+)
+
+deploydocs(;
+ repo="github.com/ITensor/BlockSparseArrays.jl", devbranch="main", push_preview=true
+)
diff --git a/docs/make_index.jl b/docs/make_index.jl
new file mode 100644
index 0000000..68b1790
--- /dev/null
+++ b/docs/make_index.jl
@@ -0,0 +1,9 @@
+using Literate: Literate
+using BlockSparseArrays: BlockSparseArrays
+
+Literate.markdown(
+ joinpath(pkgdir(BlockSparseArrays), "examples", "README.jl"),
+ joinpath(pkgdir(BlockSparseArrays), "docs", "src");
+ flavor=Literate.DocumenterFlavor(),
+ name="index",
+)
diff --git a/docs/make_readme.jl b/docs/make_readme.jl
new file mode 100644
index 0000000..ae769fd
--- /dev/null
+++ b/docs/make_readme.jl
@@ -0,0 +1,14 @@
+using Literate: Literate
+using BlockSparseArrays: BlockSparseArrays
+
+Literate.markdown(
+ joinpath(pkgdir(BlockSparseArrays), "examples", "README.jl"),
+  pkgdir(BlockSparseArrays);
+ flavor=Literate.CommonMarkFlavor(),
+ name="README",
+)
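+
+# For example (assuming the `docs` environment has been set up with this
+# package developed into it), run from the repository root:
+#
+#   julia --project=docs docs/make_readme.jl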
diff --git a/examples/Project.toml b/examples/Project.toml
index d1bf575..e743f22 100644
--- a/examples/Project.toml
+++ b/examples/Project.toml
@@ -1,4 +1,22 @@
[deps]
BlockArrays = "8e7c35d0-a365-5155-bbbb-fb81a777f24e"
-NDTensors = "23ae76d9-e61a-49c4-8f12-3f1a16adf9cf"
+BlockSparseArrays = "2c9a651f-6452-4ace-a6ac-809f4280fbb4"
+BroadcastMapConversion = "4a4adec5-520f-4750-bb37-d5e66b4ddeb2"
+Documenter = "e30172f5-a6a5-5a46-863b-614d45cd2de4"
+GradedUnitRanges = "e2de450a-8a67-46c7-b59c-01d5a3d041c5"
+LabelledNumbers = "f856a3a6-4152-4ec4-b2a7-02c1a55d7993"
+Literate = "98b081ad-f1c9-55d3-8b20-4c87d4299306"
+NestedPermutedDimsArrays = "2c2a8ec4-3cfc-4276-aa3e-1307b4294e58"
+SparseArraysBase = "0d5efcca-f356-4864-8770-e1ed8d78f208"
+TensorAlgebra = "68bd88dc-f39d-4e12-b2ca-f046b68fcc6a"
Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
+TypeParameterAccessors = "7e5a90cf-f82e-492e-a09b-e3e26432c138"
+
+[sources]
+BroadcastMapConversion = {url = "https://github.com/ITensor/BroadcastMapConversion.jl"}
+GradedUnitRanges = {url = "https://github.com/ITensor/GradedUnitRanges.jl"}
+LabelledNumbers = {url = "https://github.com/ITensor/LabelledNumbers.jl"}
+NestedPermutedDimsArrays = {url = "https://github.com/ITensor/NestedPermutedDimsArrays.jl"}
+SparseArraysBase = {url = "https://github.com/ITensor/SparseArraysBase.jl"}
+TensorAlgebra = {url = "https://github.com/ITensor/TensorAlgebra.jl"}
+TypeParameterAccessors = {url = "https://github.com/ITensor/TypeParameterAccessors.jl"}
diff --git a/examples/README.jl b/examples/README.jl
index 9fff22a..8b70c00 100644
--- a/examples/README.jl
+++ b/examples/README.jl
@@ -1,18 +1,31 @@
# # BlockSparseArrays.jl
-#
-# A Julia `BlockSparseArray` type based on the `BlockArrays.jl` interface.
-#
-# It wraps an elementwise `SparseArray` type that uses a dictionary-of-keys
-# to store non-zero values, specifically a `Dictionary` from `Dictionaries.jl`.
-# `BlockArrays` reinterprets the `SparseArray` as a blocked data structure.
-
-using BlockArrays: BlockArrays, PseudoBlockVector, blockedrange
-using NDTensors.BlockSparseArrays: BlockSparseArray, block_stored_length
+#
+# [![Stable](https://img.shields.io/badge/docs-stable-blue.svg)](https://ITensor.github.io/BlockSparseArrays.jl/stable/)
+# [![Dev](https://img.shields.io/badge/docs-dev-blue.svg)](https://ITensor.github.io/BlockSparseArrays.jl/dev/)
+# [![Build Status](https://github.com/ITensor/BlockSparseArrays.jl/actions/workflows/CI.yml/badge.svg?branch=main)](https://github.com/ITensor/BlockSparseArrays.jl/actions/workflows/CI.yml?query=branch%3Amain)
+# [![Coverage](https://codecov.io/gh/ITensor/BlockSparseArrays.jl/branch/main/graph/badge.svg)](https://codecov.io/gh/ITensor/BlockSparseArrays.jl)
+# [![Code Style: Blue](https://img.shields.io/badge/code%20style-blue-4495d1.svg)](https://github.com/invenia/BlueStyle)
+# [![Aqua](https://raw.githubusercontent.com/JuliaTesting/Aqua.jl/master/badge.svg)](https://github.com/JuliaTesting/Aqua.jl)
+
+# A block sparse array type in Julia based on the [`BlockArrays.jl`](https://github.com/JuliaArrays/BlockArrays.jl) interface.
+
+# ## Installation instructions
+
+#=
+```julia
+julia> using Pkg: Pkg
+
+julia> Pkg.add("https://github.com/ITensor/BlockSparseArrays.jl")
+```
+=#
+
+# ## Examples
+
+using BlockArrays: BlockArrays, BlockedVector, Block, blockedrange
+using BlockSparseArrays: BlockSparseArray, block_stored_length
using Test: @test, @test_broken
function main()
- Block = BlockArrays.Block
-
## Block dimensions
i1 = [2, 3]
i2 = [2, 3]
@@ -20,7 +33,7 @@ function main()
i_axes = (blockedrange(i1), blockedrange(i2))
function block_size(axes, block)
- return length.(getindex.(axes, BlockArrays.Block.(block.n)))
+ return length.(getindex.(axes, Block.(block.n)))
end
## Data
@@ -29,7 +42,7 @@ function main()
nz_block_lengths = prod.(nz_block_sizes)
## Blocks with contiguous underlying data
- d_data = PseudoBlockVector(randn(sum(nz_block_lengths)), nz_block_lengths)
+ d_data = BlockedVector(randn(sum(nz_block_lengths)), nz_block_lengths)
d_blocks = [
reshape(@view(d_data[Block(i)]), block_size(i_axes, nz_blocks[i])) for
i in 1:length(nz_blocks)
@@ -55,22 +68,22 @@ function main()
b[Block(1, 2)] = a₁₂
@test b[Block(1, 2)] == a₁₂
- ## Matrix multiplication (not optimized for sparsity yet)
- @test b * b ≈ Array(b) * Array(b)
+ ## Matrix multiplication
+ ## TODO: Fix this, broken.
+ @test_broken b * b ≈ Array(b) * Array(b)
permuted_b = permutedims(b, (2, 1))
- ## TODO: Fix this, broken.
- @test_broken permuted_b isa BlockSparseArray
+ @test permuted_b isa BlockSparseArray
@test permuted_b == permutedims(Array(b), (2, 1))
@test b + b ≈ Array(b) + Array(b)
@test b + b isa BlockSparseArray
- @test block_stored_length(b + b) == 2
+ ## TODO: Fix this, broken.
+ @test_broken block_stored_length(b + b) == 2
scaled_b = 2b
@test scaled_b ≈ 2Array(b)
- ## TODO: Fix this, broken.
- @test_broken scaled_b isa BlockSparseArray
+ @test scaled_b isa BlockSparseArray
## TODO: Fix this, broken.
@test_broken reshape(b, ([4, 6, 6, 9],)) isa BlockSparseArray{<:Any,1}
@@ -82,14 +95,14 @@ main()
# # BlockSparseArrays.jl and BlockArrays.jl interface
-using BlockArrays: BlockArrays
-using NDTensors.BlockSparseArrays: BlockSparseArray
+using BlockArrays: BlockArrays, Block
+using BlockSparseArrays: BlockSparseArray
i1 = [2, 3]
i2 = [2, 3]
B = BlockSparseArray{Float64}(i1, i2)
-B[BlockArrays.Block(1, 1)] = randn(2, 2)
-B[BlockArrays.Block(2, 2)] = randn(3, 3)
+B[Block(1, 1)] = randn(2, 2)
+B[Block(2, 2)] = randn(3, 3)
## Minimal interface
@@ -97,7 +110,7 @@ B[BlockArrays.Block(2, 2)] = randn(3, 3)
@show collect.(BlockArrays.blockaxes(axes(B, 1)))
## Index range of a block
-@show axes(B, 1)[BlockArrays.Block(1)]
+@show axes(B, 1)[Block(1)]
## Last index of each block
@show BlockArrays.blocklasts(axes(B, 1))
@@ -106,12 +119,12 @@ B[BlockArrays.Block(2, 2)] = randn(3, 3)
@show BlockArrays.findblock(axes(B, 1), 3)
## Retrieve a block
-@show B[BlockArrays.Block(1, 1)]
-@show BlockArrays.viewblock(B, BlockArrays.Block(1, 1))
+@show B[Block(1, 1)]
+@show BlockArrays.viewblock(B, Block(1, 1))
## Check block bounds
@show BlockArrays.blockcheckbounds(B, 2, 2)
-@show BlockArrays.blockcheckbounds(B, BlockArrays.Block(2, 2))
+@show BlockArrays.blockcheckbounds(B, Block(2, 2))
## Derived interface
@@ -122,85 +135,11 @@ B[BlockArrays.Block(2, 2)] = randn(3, 3)
@show sum.(BlockArrays.eachblock(B))
## Reshape into 1-d
-@show BlockArrays.blockvec(B)[BlockArrays.Block(1)]
+## TODO: Fix this, broken.
+## @show BlockArrays.blockvec(B)[Block(1)]
## Array-of-array view
-@show BlockArrays.blocks(B)[1, 1] == B[BlockArrays.Block(1, 1)]
+@show BlockArrays.blocks(B)[1, 1] == B[Block(1, 1)]
## Access an index within a block
-@show B[BlockArrays.Block(1, 1)[1, 1]] == B[1, 1]
-
-# # Proposals for interfaces based on `BlockArrays.jl`, `SparseArrays`, and `BlockSparseArrays.jl`
-
-#=
-```julia
-# BlockSparseArray interface
-
-# Define `eachblockindex`
-eachblockindex(B::BlockArrays.AbstractBlockArray) = Iterators.product(BlockArrays.blockaxes(B)...)
-
-eachblockindex(B::BlockArrays.AbstractBlockArray, b::Block) # indices in a block
-
-blocksize(B::BlockArrays.AbstractBlockArray, b::Block) # size of a block
-blocksize(axes, b::Block) # size of a block
-
-blocklength(B::BlockArrays.AbstractBlockArray, b::Block) # length of a block
-blocklength(axes, b::Block) # length of a block
-
-# Other functions
-BlockArrays.blocksize(B) # number of blocks in each dimension
-BlockArrays.blocksizes(B) # length of blocks in each dimension
-
-tuple_block(Block(2, 2)) == (Block(2), Block(2)) # Block.(b.n)
-blocksize(axes, b::Block) = map(axis -> length(axis[Block(b.n)]), axes)
-blocksize(B, Block(2, 2)) = size(B[Block(2, 2)]) # size of a specified block
-
-# SparseArrays interface
-
-findnz(S) # outputs nonzero keys and values (SparseArrayKit.nonzero_pairs)
-nonzeros(S) # vector of structural nonzeros (SparseArrayKit.nonzero_values)
-nnz(S) # number of nonzero values (SparseArrayKit.nonzero_length)
-rowvals(S) # row that each nonzero value in `nonzeros(S)` is in
-nzrange(S, c) # range of linear indices into `nonzeros(S)` for values in column `c`
-findall(!iszero, S) # CartesianIndices of numerical nonzeros
-issparse(S)
-sparse(A) # convert to sparse
-dropzeros!(S)
-droptol!(S, tol)
-
-# BlockSparseArrays.jl + SparseArrays
-
-blockfindnz(B) # outputs nonzero block indices/keys and block views
-blocknonzeros(B)
-blocknnz(S)
-blockfindall(!iszero, B)
-isblocksparse(B)
-blocksparse(A)
-blockdropzeros!(B)
-blockdroptol!(B, tol)
-
-# SparseArrayKit.jl interface
-
-nonzero_pairs(a) # SparseArrays.findnz
-nonzero_keys(a) # SparseArrays.?
-nonzero_values(a) # SparseArrays.nonzeros
-nonzero_length(a) # SparseArrays.nnz
-
-# BlockSparseArrays.jl + SparseArrayKit.jl interface
-
-block_nonzero_pairs
-block_nonzero_keys
-block_nonzero_values
-block_nonzero_length
-```
-=#
-
-#=
-You can generate this README with:
-```julia
-using Literate
-using NDTensors.BlockSparseArrays
-dir = joinpath(pkgdir(BlockSparseArrays), "src", "BlockSparseArrays")
-Literate.markdown(joinpath(dir, "examples", "README.jl"), dir; flavor=Literate.CommonMarkFlavor())
-```
-=#
+@show B[Block(1, 1)[1, 1]] == B[1, 1]
diff --git a/ext/BlockSparseArraysGradedAxesExt/src/BlockSparseArraysGradedAxesExt.jl b/ext/BlockSparseArraysGradedUnitRangesExt/src/BlockSparseArraysGradedUnitRangesExt.jl
similarity index 87%
rename from ext/BlockSparseArraysGradedAxesExt/src/BlockSparseArraysGradedAxesExt.jl
rename to ext/BlockSparseArraysGradedUnitRangesExt/src/BlockSparseArraysGradedUnitRangesExt.jl
index 85fcaab..e10957e 100644
--- a/ext/BlockSparseArraysGradedAxesExt/src/BlockSparseArraysGradedAxesExt.jl
+++ b/ext/BlockSparseArraysGradedUnitRangesExt/src/BlockSparseArraysGradedUnitRangesExt.jl
@@ -1,4 +1,4 @@
-module BlockSparseArraysGradedAxesExt
+module BlockSparseArraysGradedUnitRangesExt
using BlockArrays:
AbstractBlockVector,
AbstractBlockedUnitRange,
@@ -14,8 +14,8 @@ using ..BlockSparseArrays:
BlockSparseMatrix,
BlockSparseVector,
block_merge
-using ...GradedAxes:
- GradedAxes,
+using GradedUnitRanges:
+ GradedUnitRanges,
AbstractGradedUnitRange,
OneToOne,
blockmergesortperm,
@@ -25,7 +25,7 @@ using ...GradedAxes:
nondual,
tensor_product
using LinearAlgebra: Adjoint, Transpose
-using ...TensorAlgebra:
+using TensorAlgebra:
TensorAlgebra, FusionStyle, BlockReshapeFusion, SectorFusion, fusedims, splitdims
# TODO: Make a `ReduceWhile` library.
@@ -77,8 +77,8 @@ end
# This is a temporary fix for `eachindex` being broken for BlockSparseArrays
# with mixed dual and non-dual axes. This shouldn't be needed once
-# GradedAxes is rewritten using BlockArrays v1.
-# TODO: Delete this once GradedAxes is rewritten.
+# GradedUnitRanges is rewritten using BlockArrays v1.
+# TODO: Delete this once GradedUnitRanges is rewritten.
function Base.eachindex(a::AbstractBlockSparseArray)
return CartesianIndices(nondual.(axes(a)))
end
@@ -98,8 +98,8 @@ end
# TODO: Remove this once that issue is fixed,
# see https://github.com/JuliaArrays/BlockArrays.jl/pull/405.
using BlockArrays: BlockRange
-using NDTensors.LabelledNumbers: label
-function GradedAxes.blocklabels(a::BlockSparseVector)
+using LabelledNumbers: label
+function GradedUnitRanges.blocklabels(a::BlockSparseVector)
return map(BlockRange(a)) do block
return label(blocks(a)[Int(block)])
end
@@ -107,8 +107,8 @@ end
# This is a temporary fix for `show` being broken for BlockSparseArrays
# with mixed dual and non-dual axes. This shouldn't be needed once
-# GradedAxes is rewritten using BlockArrays v1.
-# TODO: Delete this once GradedAxes is rewritten.
+# GradedUnitRanges is rewritten using BlockArrays v1.
+# TODO: Delete this once GradedUnitRanges is rewritten.
function blocksparse_show(
io::IO, mime::MIME"text/plain", a::AbstractArray, axes_a::Tuple; kwargs...
)
@@ -122,8 +122,8 @@ end
# This is a temporary fix for `show` being broken for BlockSparseArrays
# with mixed dual and non-dual axes. This shouldn't be needed once
-# GradedAxes is rewritten using BlockArrays v1.
-# TODO: Delete this once GradedAxes is rewritten.
+# GradedUnitRanges is rewritten using BlockArrays v1.
+# TODO: Delete this once GradedUnitRanges is rewritten.
function Base.show(io::IO, mime::MIME"text/plain", a::BlockSparseArray; kwargs...)
axes_a = axes(a)
a_nondual = BlockSparseArray(blocks(a), nondual.(axes(a)))
@@ -132,8 +132,8 @@ end
# This is a temporary fix for `show` being broken for BlockSparseArrays
# with mixed dual and non-dual axes. This shouldn't be needed once
-# GradedAxes is rewritten using BlockArrays v1.
-# TODO: Delete this once GradedAxes is rewritten.
+# GradedUnitRanges is rewritten using BlockArrays v1.
+# TODO: Delete this once GradedUnitRanges is rewritten.
function Base.show(
io::IO, mime::MIME"text/plain", a::Adjoint{<:Any,<:BlockSparseMatrix}; kwargs...
)
@@ -144,8 +144,8 @@ end
# This is a temporary fix for `show` being broken for BlockSparseArrays
# with mixed dual and non-dual axes. This shouldn't be needed once
-# GradedAxes is rewritten using BlockArrays v1.
-# TODO: Delete this once GradedAxes is rewritten.
+# GradedUnitRanges is rewritten using BlockArrays v1.
+# TODO: Delete this once GradedUnitRanges is rewritten.
function Base.show(
io::IO, mime::MIME"text/plain", a::Transpose{<:Any,<:BlockSparseMatrix}; kwargs...
)
diff --git a/ext/BlockSparseArraysGradedAxesExt/src/reducewhile.jl b/ext/BlockSparseArraysGradedUnitRangesExt/src/reducewhile.jl
similarity index 98%
rename from ext/BlockSparseArraysGradedAxesExt/src/reducewhile.jl
rename to ext/BlockSparseArraysGradedUnitRangesExt/src/reducewhile.jl
index 661c95e..5eb42b1 100644
--- a/ext/BlockSparseArraysGradedAxesExt/src/reducewhile.jl
+++ b/ext/BlockSparseArraysGradedUnitRangesExt/src/reducewhile.jl
@@ -1,9 +1,9 @@
-"""
+#=
reducewhile(f, op, collection, state)
reducewhile(x -> length(x) < 3, vcat, ["a", "b", "c", "d"], 2; init=String[]) ==
(["b", "c"], 4)
-"""
+=#
function reducewhile(f, op, collection, state; init)
prev_result = init
prev_state = state
@@ -19,12 +19,12 @@ function reducewhile(f, op, collection, state; init)
return prev_result, prev_state
end
-"""
+#=
groupreducewhile(f, op, collection, ngroups)
groupreducewhile((i, x) -> length(x) ≤ i, vcat, ["a", "b", "c", "d", "e", "f"], 3; init=String[]) ==
(["a"], ["b", "c"], ["d", "e", "f"])
-"""
+=#
function groupreducewhile(f, op, collection, ngroups; init)
state = firstindex(collection)
return ntuple(ngroups) do group_number
diff --git a/ext/BlockSparseArraysGradedAxesExt/test/Project.toml b/ext/BlockSparseArraysGradedUnitRangesExt/test/Project.toml
similarity index 100%
rename from ext/BlockSparseArraysGradedAxesExt/test/Project.toml
rename to ext/BlockSparseArraysGradedUnitRangesExt/test/Project.toml
diff --git a/ext/BlockSparseArraysGradedAxesExt/test/runtests.jl b/ext/BlockSparseArraysGradedUnitRangesExt/test/runtests.jl
similarity index 96%
rename from ext/BlockSparseArraysGradedAxesExt/test/runtests.jl
rename to ext/BlockSparseArraysGradedUnitRangesExt/test/runtests.jl
index 2c2a504..b800a81 100644
--- a/ext/BlockSparseArraysGradedAxesExt/test/runtests.jl
+++ b/ext/BlockSparseArraysGradedUnitRangesExt/test/runtests.jl
@@ -2,9 +2,9 @@
using Test: @test, @testset
using BlockArrays:
AbstractBlockArray, Block, BlockedOneTo, blockedrange, blocklengths, blocksize
-using NDTensors.BlockSparseArrays: BlockSparseArray, block_stored_length
-using NDTensors.GradedAxes:
- GradedAxes,
+using BlockSparseArrays: BlockSparseArray, block_stored_length
+using GradedUnitRanges:
+ GradedUnitRanges,
GradedOneTo,
GradedUnitRange,
GradedUnitRangeDual,
@@ -12,10 +12,10 @@ using NDTensors.GradedAxes:
dual,
gradedrange,
isdual
-using NDTensors.LabelledNumbers: label
-using NDTensors.SparseArraysBase: stored_length
-using NDTensors.SymmetrySectors: U1
-using NDTensors.TensorAlgebra: fusedims, splitdims
+using LabelledNumbers: label
+using SparseArraysBase: stored_length
+using SymmetrySectors: U1
+using TensorAlgebra: fusedims, splitdims
using LinearAlgebra: adjoint
using Random: randn!
function blockdiagonal!(f, a::AbstractArray)
@@ -27,7 +27,7 @@ function blockdiagonal!(f, a::AbstractArray)
end
const elts = (Float32, Float64, Complex{Float32}, Complex{Float64})
-@testset "BlockSparseArraysGradedAxesExt (eltype=$elt)" for elt in elts
+@testset "BlockSparseArraysGradedUnitRangesExt (eltype=$elt)" for elt in elts
@testset "map" begin
d1 = gradedrange([U1(0) => 2, U1(1) => 2])
d2 = gradedrange([U1(0) => 2, U1(1) => 2])
diff --git a/ext/BlockSparseArraysTensorAlgebraExt/src/BlockSparseArraysTensorAlgebraExt.jl b/ext/BlockSparseArraysTensorAlgebraExt/src/BlockSparseArraysTensorAlgebraExt.jl
index 74ebd65..3d4b145 100644
--- a/ext/BlockSparseArraysTensorAlgebraExt/src/BlockSparseArraysTensorAlgebraExt.jl
+++ b/ext/BlockSparseArraysTensorAlgebraExt/src/BlockSparseArraysTensorAlgebraExt.jl
@@ -1,8 +1,8 @@
module BlockSparseArraysTensorAlgebraExt
using BlockArrays: AbstractBlockedUnitRange
using ..BlockSparseArrays: AbstractBlockSparseArray, block_reshape
-using ...GradedAxes: tensor_product
-using ...TensorAlgebra: TensorAlgebra, FusionStyle, BlockReshapeFusion
+using GradedUnitRanges: tensor_product
+using TensorAlgebra: TensorAlgebra, FusionStyle, BlockReshapeFusion
function TensorAlgebra.:⊗(a1::AbstractBlockedUnitRange, a2::AbstractBlockedUnitRange)
return tensor_product(a1, a2)
diff --git a/ext/BlockSparseArraysTensorAlgebraExt/test/runtests.jl b/ext/BlockSparseArraysTensorAlgebraExt/test/runtests.jl
index e5c1e50..06dff45 100644
--- a/ext/BlockSparseArraysTensorAlgebraExt/test/runtests.jl
+++ b/ext/BlockSparseArraysTensorAlgebraExt/test/runtests.jl
@@ -1,8 +1,8 @@
@eval module $(gensym())
using Test: @test, @testset
-using NDTensors.BlockSparseArrays: BlockSparseArray
-using NDTensors.TensorAlgebra: contract
-using NDTensors.SparseArraysBase: densearray
+using BlockSparseArrays: BlockSparseArray
+using TensorAlgebra: contract
+using SparseArraysBase: densearray
@testset "BlockSparseArraysTensorAlgebraExt (eltype=$elt)" for elt in (
Float32, Float64, Complex{Float32}, Complex{Float64}
)
diff --git a/src/BlockArraysExtensions/BlockArraysExtensions.jl b/src/BlockArraysExtensions/BlockArraysExtensions.jl
index c57906f..0639dee 100644
--- a/src/BlockArraysExtensions/BlockArraysExtensions.jl
+++ b/src/BlockArraysExtensions/BlockArraysExtensions.jl
@@ -19,10 +19,9 @@ using BlockArrays:
blocks,
findblock,
findblockindex
-using Compat: allequal
using Dictionaries: Dictionary, Indices
-using ..GradedAxes: blockedunitrange_getindices, to_blockindices
-using ..SparseArraysBase: SparseArraysBase, stored_length, stored_indices
+using GradedUnitRanges: blockedunitrange_getindices, to_blockindices
+using SparseArraysBase: SparseArraysBase, stored_length, stored_indices
# A return type for `blocks(array)` when `array` isn't blocked.
# Represents a vector with just that single block.
@@ -191,25 +190,25 @@ function sub_axis(a::AbstractUnitRange, indices)
return error("Not implemented")
end
-# TODO: Use `GradedAxes.blockedunitrange_getindices`.
+# TODO: Use `GradedUnitRanges.blockedunitrange_getindices`.
# Outputs a `BlockUnitRange`.
function sub_axis(a::AbstractUnitRange, indices::AbstractUnitRange)
return only(axes(blockedunitrange_getindices(a, indices)))
end
-# TODO: Use `GradedAxes.blockedunitrange_getindices`.
+# TODO: Use `GradedUnitRanges.blockedunitrange_getindices`.
# Outputs a `BlockUnitRange`.
function sub_axis(a::AbstractUnitRange, indices::BlockSlice{<:BlockRange{1}})
return sub_axis(a, indices.block)
end
-# TODO: Use `GradedAxes.blockedunitrange_getindices`.
+# TODO: Use `GradedUnitRanges.blockedunitrange_getindices`.
# Outputs a `BlockUnitRange`.
function sub_axis(a::AbstractUnitRange, indices::BlockSlice{<:Block{1}})
return sub_axis(a, Block(indices))
end
-# TODO: Use `GradedAxes.blockedunitrange_getindices`.
+# TODO: Use `GradedUnitRanges.blockedunitrange_getindices`.
# Outputs a `BlockUnitRange`.
function sub_axis(a::AbstractUnitRange, indices::BlockSlice{<:BlockIndexRange{1}})
return sub_axis(a, indices.block)
@@ -219,25 +218,25 @@ function sub_axis(a::AbstractUnitRange, indices::BlockIndices)
return sub_axis(a, indices.blocks)
end
-# TODO: Use `GradedAxes.blockedunitrange_getindices`.
+# TODO: Use `GradedUnitRanges.blockedunitrange_getindices`.
# Outputs a `BlockUnitRange`.
function sub_axis(a::AbstractUnitRange, indices::Block)
return only(axes(blockedunitrange_getindices(a, indices)))
end
-# TODO: Use `GradedAxes.blockedunitrange_getindices`.
+# TODO: Use `GradedUnitRanges.blockedunitrange_getindices`.
# Outputs a `BlockUnitRange`.
function sub_axis(a::AbstractUnitRange, indices::BlockIndexRange)
return only(axes(blockedunitrange_getindices(a, indices)))
end
-# TODO: Use `GradedAxes.blockedunitrange_getindices`.
+# TODO: Use `GradedUnitRanges.blockedunitrange_getindices`.
# Outputs a `BlockUnitRange`.
function sub_axis(a::AbstractUnitRange, indices::AbstractVector{<:Block})
return blockedrange([length(a[index]) for index in indices])
end
-# TODO: Use `GradedAxes.blockedunitrange_getindices`.
+# TODO: Use `GradedUnitRanges.blockedunitrange_getindices`.
# TODO: Merge blocks.
function sub_axis(a::AbstractUnitRange, indices::BlockVector{<:Block})
# `collect` is needed here, otherwise a `PseudoBlockVector` is
@@ -581,7 +580,7 @@ function view!(a::AbstractArray{<:Any,N}, index::Vararg{BlockIndexRange{1},N}) w
end
using MacroTools: @capture
-using NDTensors.SparseArraysBase: is_getindex_expr
+using SparseArraysBase: is_getindex_expr
macro view!(expr)
if !is_getindex_expr(expr)
error("@view must be used with getindex syntax (as `@view! a[i,j,...]`)")
diff --git a/src/BlockArraysSparseArraysBaseExt/BlockArraysSparseArraysBaseExt.jl b/src/BlockArraysSparseArraysBaseExt/BlockArraysSparseArraysBaseExt.jl
index 56b0080..69e740e 100644
--- a/src/BlockArraysSparseArraysBaseExt/BlockArraysSparseArraysBaseExt.jl
+++ b/src/BlockArraysSparseArraysBaseExt/BlockArraysSparseArraysBaseExt.jl
@@ -1,5 +1,5 @@
using BlockArrays: AbstractBlockArray, BlocksView
-using ..SparseArraysBase: SparseArraysBase, stored_length
+using SparseArraysBase: SparseArraysBase, stored_length
function SparseArraysBase.stored_length(a::AbstractBlockArray)
return sum(b -> stored_length(b), blocks(a); init=zero(Int))
diff --git a/src/BlockSparseArrays.jl b/src/BlockSparseArrays.jl
index 576fe03..41a1b93 100644
--- a/src/BlockSparseArrays.jl
+++ b/src/BlockSparseArrays.jl
@@ -23,6 +23,8 @@ include("blocksparsearray/defaults.jl")
include("blocksparsearray/blocksparsearray.jl")
include("BlockArraysSparseArraysBaseExt/BlockArraysSparseArraysBaseExt.jl")
include("../ext/BlockSparseArraysTensorAlgebraExt/src/BlockSparseArraysTensorAlgebraExt.jl")
-include("../ext/BlockSparseArraysGradedAxesExt/src/BlockSparseArraysGradedAxesExt.jl")
+include(
+ "../ext/BlockSparseArraysGradedUnitRangesExt/src/BlockSparseArraysGradedUnitRangesExt.jl"
+)
include("../ext/BlockSparseArraysAdaptExt/src/BlockSparseArraysAdaptExt.jl")
end
diff --git a/src/abstractblocksparsearray/abstractblocksparsearray.jl b/src/abstractblocksparsearray/abstractblocksparsearray.jl
index cfe1ef5..7de1a2d 100644
--- a/src/abstractblocksparsearray/abstractblocksparsearray.jl
+++ b/src/abstractblocksparsearray/abstractblocksparsearray.jl
@@ -1,6 +1,6 @@
using BlockArrays:
BlockArrays, AbstractBlockArray, Block, BlockIndex, BlockedUnitRange, blocks
-using ..SparseArraysBase: sparse_getindex, sparse_setindex!
+using SparseArraysBase: sparse_getindex, sparse_setindex!
# TODO: Delete this. This function was replaced
# by `stored_length` but is still used in `NDTensors`.
diff --git a/src/abstractblocksparsearray/arraylayouts.jl b/src/abstractblocksparsearray/arraylayouts.jl
index 4e79b8f..e02eb97 100644
--- a/src/abstractblocksparsearray/arraylayouts.jl
+++ b/src/abstractblocksparsearray/arraylayouts.jl
@@ -1,7 +1,7 @@
using ArrayLayouts: ArrayLayouts, DualLayout, MemoryLayout, MulAdd
using BlockArrays: BlockLayout
-using ..SparseArraysBase: SparseLayout
-using ..TypeParameterAccessors: parenttype, similartype
+using SparseArraysBase: SparseLayout
+using TypeParameterAccessors: parenttype, similartype
function ArrayLayouts.MemoryLayout(arraytype::Type{<:AnyAbstractBlockSparseArray})
outer_layout = typeof(MemoryLayout(blockstype(arraytype)))
diff --git a/src/abstractblocksparsearray/map.jl b/src/abstractblocksparsearray/map.jl
index 30ca37c..2500509 100644
--- a/src/abstractblocksparsearray/map.jl
+++ b/src/abstractblocksparsearray/map.jl
@@ -1,7 +1,7 @@
using ArrayLayouts: LayoutArray
using BlockArrays: blockisequal
using LinearAlgebra: Adjoint, Transpose
-using ..SparseArraysBase:
+using SparseArraysBase:
SparseArraysBase,
SparseArrayStyle,
sparse_map!,
diff --git a/src/abstractblocksparsearray/sparsearrayinterface.jl b/src/abstractblocksparsearray/sparsearrayinterface.jl
index 31dbca2..b0b8aad 100644
--- a/src/abstractblocksparsearray/sparsearrayinterface.jl
+++ b/src/abstractblocksparsearray/sparsearrayinterface.jl
@@ -1,5 +1,5 @@
using BlockArrays: Block
-using ..SparseArraysBase: SparseArraysBase, sparse_storage, stored_indices
+using SparseArraysBase: SparseArraysBase, sparse_storage, stored_indices
# Structure storing the block sparse storage
struct BlockSparseStorage{Arr<:AbstractBlockSparseArray}
diff --git a/src/abstractblocksparsearray/views.jl b/src/abstractblocksparsearray/views.jl
index 7283b85..39872e0 100644
--- a/src/abstractblocksparsearray/views.jl
+++ b/src/abstractblocksparsearray/views.jl
@@ -92,7 +92,7 @@ function Base.view(a::AbstractBlockSparseArray{<:Any,N}, I::Vararg{Block{1},N})
return viewblock(a, I...)
end
-# TODO: Move to `GradedAxes` or `BlockArraysExtensions`.
+# TODO: Move to `GradedUnitRanges` or `BlockArraysExtensions`.
to_block(I::Block{1}) = I
to_block(I::BlockIndexRange{1}) = Block(I)
to_block_indices(I::Block{1}) = Colon()
diff --git a/src/abstractblocksparsearray/wrappedabstractblocksparsearray.jl b/src/abstractblocksparsearray/wrappedabstractblocksparsearray.jl
index c961f67..20ef541 100644
--- a/src/abstractblocksparsearray/wrappedabstractblocksparsearray.jl
+++ b/src/abstractblocksparsearray/wrappedabstractblocksparsearray.jl
@@ -9,7 +9,7 @@ using BlockArrays:
mortar,
unblock
using SplitApplyCombine: groupcount
-using ..TypeParameterAccessors: similartype
+using TypeParameterAccessors: similartype
const WrappedAbstractBlockSparseArray{T,N} = WrappedArray{
T,N,AbstractBlockSparseArray,AbstractBlockSparseArray{T,N}
@@ -71,7 +71,7 @@ function BlockArrays.blocks(
return blocksparse_blocks(a)
end
-using ..TypeParameterAccessors: parenttype
+using TypeParameterAccessors: parenttype
function blockstype(arraytype::Type{<:WrappedAbstractBlockSparseArray})
return blockstype(parenttype(arraytype))
end
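`blockstype` on a wrapper delegates to the wrapped array's type, with `parenttype` extracting that type at the type level. A minimal stand-in for the `SubArray` case (TypeParameterAccessors generalizes this across wrapper types):

```julia
# Stand-in for `parenttype`, specialized to SubArray only.
parenttype_demo(::Type{<:SubArray{<:Any,<:Any,P}}) where {P} = P

parenttype_demo(typeof(view(zeros(2, 2), 1:1, 1:2)))  # Matrix{Float64}
```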
diff --git a/src/backup/qr.jl b/src/backup/qr.jl
index 4cd4527..c480398 100644
--- a/src/backup/qr.jl
+++ b/src/backup/qr.jl
@@ -1,4 +1,4 @@
-using ...SparseArraysBase: SparseArrayDOK
+using SparseArraysBase: SparseArrayDOK

# Check if the matrix has 1 or fewer entries
# per row/column.
diff --git a/src/blocksparsearray/blocksparsearray.jl b/src/blocksparsearray/blocksparsearray.jl
index 19de9fd..3b3bbae 100644
--- a/src/blocksparsearray/blocksparsearray.jl
+++ b/src/blocksparsearray/blocksparsearray.jl
@@ -1,6 +1,6 @@
using BlockArrays: BlockArrays, Block, BlockedUnitRange, blockedrange, blocklength
using Dictionaries: Dictionary
-using ..SparseArraysBase: SparseArrayDOK
+using SparseArraysBase: SparseArrayDOK
# TODO: Delete this.
## using BlockArrays: blocks
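The `SparseArrayDOK`/`Dictionary` pairing hints at the storage model: stored blocks live in a dictionary keyed by block position, and absent blocks are implicitly zero. A hedged sketch of that idea, independent of the actual `BlockSparseArray` internals:

```julia
using BlockArrays: Block
using Dictionaries: Dictionary

# Dictionary-of-keys storage: only nonzero blocks are materialized.
blockdata = Dictionary{Block{2},Matrix{Float64}}()
insert!(blockdata, Block(1, 2), ones(2, 3))
in(Block(2, 1), keys(blockdata))  # false: that block is implicitly zero
```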
diff --git a/src/blocksparsearray/defaults.jl b/src/blocksparsearray/defaults.jl
index ab126aa..938610d 100644
--- a/src/blocksparsearray/defaults.jl
+++ b/src/blocksparsearray/defaults.jl
@@ -1,6 +1,6 @@
using BlockArrays: Block
using Dictionaries: Dictionary
-using ..SparseArraysBase: SparseArrayDOK
+using SparseArraysBase: SparseArrayDOK
# Construct the sparse structure storing the blocks
function default_blockdata(
diff --git a/src/blocksparsearrayinterface/arraylayouts.jl b/src/blocksparsearrayinterface/arraylayouts.jl
index f7d02ae..3ec48d5 100644
--- a/src/blocksparsearrayinterface/arraylayouts.jl
+++ b/src/blocksparsearrayinterface/arraylayouts.jl
@@ -1,6 +1,6 @@
using ArrayLayouts: ArrayLayouts, Dot, MatMulMatAdd, MatMulVecAdd, MulAdd
using BlockArrays: BlockLayout
-using ..SparseArraysBase: SparseLayout
+using SparseArraysBase: SparseLayout
using LinearAlgebra: dot, mul!
function blocksparse_muladd!(
diff --git a/src/blocksparsearrayinterface/blocksparsearrayinterface.jl b/src/blocksparsearrayinterface/blocksparsearrayinterface.jl
index 4e9b85a..af2a9cd 100644
--- a/src/blocksparsearrayinterface/blocksparsearrayinterface.jl
+++ b/src/blocksparsearrayinterface/blocksparsearrayinterface.jl
@@ -13,7 +13,7 @@ using BlockArrays:
blocks,
findblockindex
using LinearAlgebra: Adjoint, Transpose
-using ..SparseArraysBase: perm, iperm, stored_length, sparse_zero!
+using SparseArraysBase: perm, iperm, stored_length, sparse_zero!
blocksparse_blocks(a::AbstractArray) = error("Not implemented")
@@ -142,7 +142,7 @@ end
# BlockArrays
-using ..SparseArraysBase: SparseArraysBase, AbstractSparseArray, AbstractSparseMatrix
+using SparseArraysBase: SparseArraysBase, AbstractSparseArray, AbstractSparseMatrix
_perm(::PermutedDimsArray{<:Any,<:Any,perm}) where {perm} = perm
_invperm(::PermutedDimsArray{<:Any,<:Any,<:Any,invperm}) where {invperm} = invperm
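`_perm` and `_invperm` read the permutation directly off `PermutedDimsArray`'s type parameters, so no runtime computation is involved. The same trick, standalone:

```julia
perm_of(::PermutedDimsArray{<:Any,<:Any,perm}) where {perm} = perm
invperm_of(::PermutedDimsArray{<:Any,<:Any,<:Any,iperm}) where {iperm} = iperm

a = PermutedDimsArray(zeros(2, 3, 4), (3, 1, 2))
perm_of(a)     # (3, 1, 2)
invperm_of(a)  # (2, 3, 1)
```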
diff --git a/src/blocksparsearrayinterface/broadcast.jl b/src/blocksparsearrayinterface/broadcast.jl
index 7ce8d02..7028d29 100644
--- a/src/blocksparsearrayinterface/broadcast.jl
+++ b/src/blocksparsearrayinterface/broadcast.jl
@@ -1,5 +1,5 @@
using Base.Broadcast: BroadcastStyle, AbstractArrayStyle, DefaultArrayStyle, Broadcasted
-using ..BroadcastMapConversion: map_function, map_args
+using BroadcastMapConversion: map_function, map_args
struct BlockSparseArrayStyle{N} <: AbstractArrayStyle{N} end
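BroadcastMapConversion turns a fused broadcast expression into an equivalent `map` call, which lets the block sparse `map` machinery serve `a .+ b` and friends. Assuming `map_function`/`map_args` behave like the fields of `Broadcast.flatten`'s output, the core idea is:

```julia
using Base.Broadcast: broadcasted, flatten

bc = broadcasted(+, [1, 2], [3, 4])  # lazy broadcast object
flat = flatten(bc)                   # fuse nested broadcasts into one call
map(flat.f, flat.args...)            # [4, 6]: the broadcast expressed as a map
```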
diff --git a/src/blocksparsearrayinterface/cat.jl b/src/blocksparsearrayinterface/cat.jl
index b2d6596..0115033 100644
--- a/src/blocksparsearrayinterface/cat.jl
+++ b/src/blocksparsearrayinterface/cat.jl
@@ -1,8 +1,8 @@
using BlockArrays: AbstractBlockedUnitRange, blockedrange, blocklengths
-using NDTensors.SparseArraysBase: SparseArraysBase, allocate_cat_output, sparse_cat!
+using SparseArraysBase: SparseArraysBase, allocate_cat_output, sparse_cat!
# TODO: Maybe move to `SparseArraysBaseBlockArraysExt`.
-# TODO: Handle dual graded unit ranges, for example in a new `SparseArraysBaseGradedAxesExt`.
+# TODO: Handle dual graded unit ranges, for example in a new `SparseArraysBaseGradedUnitRangesExt`.
function SparseArraysBase.axis_cat(
a1::AbstractBlockedUnitRange, a2::AbstractBlockedUnitRange
)
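Concatenating block sparse arrays requires building the combined axis first; for blocked unit ranges that amounts to concatenating the block lengths, which is presumably what `axis_cat` does here:

```julia
using BlockArrays: blockedrange, blocklengths

r1 = blockedrange([2, 3])
r2 = blockedrange([4])
r12 = blockedrange(vcat(blocklengths(r1), blocklengths(r2)))
blocklengths(r12)  # [2, 3, 4]
```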
diff --git a/test/Project.toml b/test/Project.toml
index b046080..bb2b12d 100644
--- a/test/Project.toml
+++ b/test/Project.toml
@@ -1,6 +1,28 @@
[deps]
BlockArrays = "8e7c35d0-a365-5155-bbbb-fb81a777f24e"
-Compat = "34da2185-b29b-5c13-b0c7-acf172513d20"
+BlockSparseArrays = "2c9a651f-6452-4ace-a6ac-809f4280fbb4"
+BroadcastMapConversion = "4a4adec5-520f-4750-bb37-d5e66b4ddeb2"
GPUArraysCore = "46192b85-c4d5-4398-a991-12ede77f4527"
+GradedUnitRanges = "e2de450a-8a67-46c7-b59c-01d5a3d041c5"
JLArrays = "27aeb0d3-9eb9-45fb-866b-73c2ecf80fcb"
+LabelledNumbers = "f856a3a6-4152-4ec4-b2a7-02c1a55d7993"
+LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
NDTensors = "23ae76d9-e61a-49c4-8f12-3f1a16adf9cf"
+NestedPermutedDimsArrays = "2c2a8ec4-3cfc-4276-aa3e-1307b4294e58"
+Pkg = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f"
+Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
+SparseArraysBase = "0d5efcca-f356-4864-8770-e1ed8d78f208"
+SymmetrySectors = "f8a8ad64-adbc-4fce-92f7-ffe2bb36a86e"
+TensorAlgebra = "68bd88dc-f39d-4e12-b2ca-f046b68fcc6a"
+Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
+TypeParameterAccessors = "7e5a90cf-f82e-492e-a09b-e3e26432c138"
+
+[sources]
+BroadcastMapConversion = {url = "https://github.com/ITensor/BroadcastMapConversion.jl"}
+GradedUnitRanges = {url = "https://github.com/ITensor/GradedUnitRanges.jl"}
+LabelledNumbers = {url = "https://github.com/ITensor/LabelledNumbers.jl"}
+NestedPermutedDimsArrays = {url = "https://github.com/ITensor/NestedPermutedDimsArrays.jl"}
+SparseArraysBase = {url = "https://github.com/ITensor/SparseArraysBase.jl"}
+SymmetrySectors = {url = "https://github.com/ITensor/SymmetrySectors.jl"}
+TensorAlgebra = {url = "https://github.com/ITensor/TensorAlgebra.jl"}
+TypeParameterAccessors = {url = "https://github.com/ITensor/TypeParameterAccessors.jl"}
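The new `[sources]` table is a Pkg feature available from Julia 1.11: because these dependencies are not yet registered, it tells `Pkg.instantiate`/`Pkg.resolve` to fetch them from the listed GitHub URLs. On older Julia versions the manual equivalent is adding each dependency by URL:

```julia
using Pkg
# Manual equivalent of one `[sources]` entry:
Pkg.add(url = "https://github.com/ITensor/SparseArraysBase.jl")
```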
diff --git a/test/runtests.jl b/test/runtests.jl
index 2a8e2c5..9bbc63b 100644
--- a/test/runtests.jl
+++ b/test/runtests.jl
@@ -1,5 +1,5 @@
@eval module $(gensym())
include("test_basics.jl")
include("../ext/BlockSparseArraysTensorAlgebraExt/test/runtests.jl")
-include("../ext/BlockSparseArraysGradedAxesExt/test/runtests.jl")
+include("../ext/BlockSparseArraysGradedUnitRangesExt/test/runtests.jl")
end
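The `@eval module $(gensym())` wrapper runs each test suite inside an anonymous module, so names defined (or brought in by `using`) in one file cannot collide with another. The isolation in miniature:

```julia
@eval module $(gensym())
x = 1  # private to the anonymous module
end
@isdefined(x)  # false: nothing leaked into the enclosing namespace
```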
diff --git a/test/test_basics.jl b/test/test_basics.jl
index 13eda13..0f2692c 100644
--- a/test/test_basics.jl
+++ b/test/test_basics.jl
@@ -14,10 +14,7 @@ using BlockArrays:
blocksize,
blocksizes,
mortar
-using Compat: @compat
-using GPUArraysCore: @allowscalar
-using LinearAlgebra: Adjoint, Transpose, dot, mul!, norm
-using NDTensors.BlockSparseArrays:
+using BlockSparseArrays:
@view!,
BlockSparseArray,
BlockSparseMatrix,
@@ -29,10 +26,11 @@ using NDTensors.BlockSparseArrays:
blockstype,
blocktype,
view!
+using GPUArraysCore: @allowscalar
+using LinearAlgebra: Adjoint, Transpose, dot, mul!, norm
using NDTensors.GPUArraysCoreExtensions: cpu
-using NDTensors.SparseArraysBase: stored_length
-using NDTensors.SparseArraysBase: SparseArrayDOK, SparseMatrixDOK, SparseVectorDOK
-using NDTensors.TensorAlgebra: contract
+using SparseArraysBase: SparseArrayDOK, SparseMatrixDOK, SparseVectorDOK, stored_length
+using TensorAlgebra: contract
using Test: @test, @test_broken, @test_throws, @testset, @inferred
include("TestBlockSparseArraysUtils.jl")
@@ -772,7 +770,7 @@ using .NDTensorsTestUtils: devices_list, is_supported_eltype
return (; a, b, x)
end
for abx in (f1(), f2())
- @compat (; a, b, x) = abx
+ (; a, b, x) = abx
@test b isa SubArray{<:Any,<:Any,<:BlockSparseArray}
@test block_stored_length(b) == 1
@test b[Block(1, 1)] == x
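Dropping `@compat` is safe because `(; a, b, x) = abx`, destructuring by property name, has been native syntax since Julia 1.7, which the test suite now assumes. In isolation:

```julia
nt = (; a = 1, b = 2, x = 3)
(; a, x) = nt  # property destructuring, native since Julia 1.7
(a, x)         # (1, 3)
```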
@@ -1012,7 +1010,7 @@ using .NDTensorsTestUtils: devices_list, is_supported_eltype
a2[Block(1, 1)] = dev(randn(elt, size(@view(a1[Block(1, 1)]))))
# TODO: Make this work, requires customization of `TensorAlgebra.fusedims` and
# `TensorAlgebra.splitdims` in terms of `BlockSparseArrays.block_reshape`,
- # and customization of `TensorAlgebra.:⊗` in terms of `GradedAxes.tensor_product`.
+ # and customization of `TensorAlgebra.:⊗` in terms of `GradedUnitRanges.tensor_product`.
a_dest, dimnames_dest = contract(a1, (1, -1), a2, (-1, 2))
@allowscalar begin
a_dest_dense, dimnames_dest_dense = contract(Array(a1), (1, -1), Array(a2), (-1, 2))