diff --git a/src/tensora/codegen/__init__.py b/src/tensora/codegen/__init__.py
index e69de29..7d525b1 100644
--- a/src/tensora/codegen/__init__.py
+++ b/src/tensora/codegen/__init__.py
@@ -0,0 +1,2 @@
+from .ast_to_c import ast_to_c
+from .type_to_c import type_to_c
diff --git a/src/tensora/codegen/type_to_c.py b/src/tensora/codegen/type_to_c.py
index 9746f2b..5d28f67 100644
--- a/src/tensora/codegen/type_to_c.py
+++ b/src/tensora/codegen/type_to_c.py
@@ -43,18 +43,12 @@ def type_to_c_pointer(type: Pointer, variable: Optional[str] = None):
 
 @type_to_c.register(Array)
 def type_to_c_array(type: Array, variable: Optional[str] = None):
-    if variable is None:
-        return f"{type_to_c(type.element)}[]"
-    else:
-        return f"{type_to_c(type.element)} {variable}[]"
+    return f"{type_to_c(type.element, variable)}[]"
 
 
 @type_to_c.register(FixedArray)
 def type_to_c_fixed_array(type: FixedArray, variable: Optional[str] = None):
-    if variable is None:
-        return f"{type_to_c(type.element)}[{type.n}]"
-    else:
-        return f"{type_to_c(type.element)} {variable}[{type.n}]"
+    return f"{type_to_c(type.element, variable)}[{type.n}]"
 
 
 def space_variable(variable: Optional[str] = None):
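For context on the type_to_c hunk above: threading the variable name through the recursive call makes array declarators come out in C style, with the brackets after the variable name (compare the updated expectations in tests/codegen/test_ast_to_c.py further down). A minimal self-contained sketch of that recursion; Integer and FixedArray here are simplified stand-ins, not tensora's actual type classes:

from dataclasses import dataclass
from functools import singledispatch
from typing import Optional


@dataclass(frozen=True)
class Integer:
    pass


@dataclass(frozen=True)
class FixedArray:
    element: object
    n: int


@singledispatch
def type_to_c(type, variable: Optional[str] = None) -> str:
    raise NotImplementedError(type)


@type_to_c.register(Integer)
def _(type: Integer, variable: Optional[str] = None) -> str:
    return "int32_t" if variable is None else f"int32_t {variable}"


@type_to_c.register(FixedArray)
def _(type: FixedArray, variable: Optional[str] = None) -> str:
    # Recurse with the same variable so the brackets land after the variable name.
    return f"{type_to_c(type.element, variable)}[{type.n}]"


assert type_to_c(FixedArray(Integer(), 3), "x") == "int32_t x[3]"
assert type_to_c(FixedArray(FixedArray(Integer(), 3), 2), "x") == "int32_t x[3][2]"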
diff --git a/src/tensora/desugar/__init__.py b/src/tensora/desugar/__init__.py
index e69de29..c8eb3c8 100644
--- a/src/tensora/desugar/__init__.py
+++ b/src/tensora/desugar/__init__.py
@@ -0,0 +1,6 @@
+from .ast import Assignment, Integer, Float, Scalar, Tensor, Add, Multiply, Contract
+from .collect_lattices import collect_lattices
+from .desugar_expression import desugar_assignment
+from .id import Id
+from .to_identifiable import to_identifiable
+from .to_iteration_graph import to_iteration_graph
diff --git a/src/tensora/desugar/collect_lattices.py b/src/tensora/desugar/collect_lattices.py
index d95e3d6..e887aa9 100644
--- a/src/tensora/desugar/collect_lattices.py
+++ b/src/tensora/desugar/collect_lattices.py
@@ -3,8 +3,8 @@
 from functools import singledispatch
 
 from tensora.format.format import Format
-from tensora.iteration_graph.merge_lattice.merge_lattice import LatticeConjuction, LatticeDisjunction, LatticeLeaf, Lattice
 from . import ast
+from ..iteration_graph import LatticeConjuction, LatticeDisjunction, LatticeLeaf, Lattice
 from ..iteration_graph.identifiable_expression import ast as id
diff --git a/src/tensora/desugar/id.py b/src/tensora/desugar/id.py
index 4fb4f8c..716ba52 100644
--- a/src/tensora/desugar/id.py
+++ b/src/tensora/desugar/id.py
@@ -1,6 +1,6 @@
-from dataclasses import dataclass
-
+__all__ = ["Id"]
+from dataclasses import dataclass
 
 
 @dataclass(frozen=True)
@@ -10,4 +10,4 @@ class Id:
     def to_tensor_leaf(self):
         from tensora.iteration_graph.identifiable_expression.tensor_leaf import TensorLeaf
 
-        return TensorLeaf(self.name, self.instance)
\ No newline at end of file
+        return TensorLeaf(self.name, self.instance)
diff --git a/src/tensora/desugar/to_identifiable.py b/src/tensora/desugar/to_identifiable.py
index fc3b0c1..1293873 100644
--- a/src/tensora/desugar/to_identifiable.py
+++ b/src/tensora/desugar/to_identifiable.py
@@ -2,9 +2,9 @@
 
 from functools import singledispatch
 
-from tensora.format.format import Format
-from ..iteration_graph.identifiable_expression import ast as id
 from . import ast as desugar
+from ..format import Format
+from ..iteration_graph.identifiable_expression import ast as id
 
 
 def to_identifiable(assignment: desugar.Assignment, input_formats: dict[str, Format], output_format: Format) -> id.Assignment:
diff --git a/src/tensora/desugar/to_iteration_graph.py b/src/tensora/desugar/to_iteration_graph.py
index 3742f9b..ada2784 100644
--- a/src/tensora/desugar/to_iteration_graph.py
+++ b/src/tensora/desugar/to_iteration_graph.py
@@ -3,16 +3,13 @@
 from dataclasses import replace
 from functools import singledispatch
 from itertools import count
-from typing import Dict, Iterator, Tuple
-from .collect_lattices import collect_lattices
-from tensora.format.format import Format, Mode
-from tensora.iteration_graph.identifiable_expression.tensor_leaf import TensorLeaf
-
-from tensora.iteration_graph.merge_lattice.merge_lattice import Lattice, LatticeLeaf
+from typing import Dict, Iterator
 
 from . import ast
-from ..iteration_graph import iteration_graph as graph
+from .collect_lattices import collect_lattices
+from ..iteration_graph import Lattice, LatticeLeaf, iteration_graph as graph
 from ..iteration_graph.identifiable_expression import ast as id
+from ..format import Format
 
 
 def to_iteration_graph(assignment: ast.Assignment, formats: dict[str, Format], output_format: Format) -> graph.IterationGraph:
diff --git a/src/tensora/ir/__init__.py b/src/tensora/ir/__init__.py
index e69de29..e6d471a 100644
--- a/src/tensora/ir/__init__.py
+++ b/src/tensora/ir/__init__.py
@@ -0,0 +1,3 @@
+from .ast import Statement, Expression, Assignable, Variable, AttributeAccess, ArrayIndex, IntegerLiteral, FloatLiteral, BooleanLiteral, ModeLiteral, ArrayLiteral, Add, Subtract, Multiply, Equal, NotEqual, GreaterThan, GreaterThanOrEqual, LessThan, LessThanOrEqual, And, Or, FunctionCall, Max, Min, Address, BooleanToInteger, Allocate, ArrayAllocate, ArrayReallocate, Free, Declaration, Assignment, DeclarationAssignment, Block, Branch, Loop, Break, Return, FunctionDefinition
+from .builder import SourceBuilder
+from .peephole import peephole
diff --git a/src/tensora/ir/builder.py b/src/tensora/ir/builder.py
index 521e484..615eaa1 100644
--- a/src/tensora/ir/builder.py
+++ b/src/tensora/ir/builder.py
@@ -6,8 +6,8 @@
 from contextlib import contextmanager
 from typing import List, Dict, Optional, Union
 
-from tensora.ir.ast import Statement, FunctionDefinition, Variable, Declaration, Block, Expression, Branch, Loop
-from tensora.ir.types import Type
+from .ast import Statement, FunctionDefinition, Variable, Declaration, Block, Expression, Branch, Loop
+from .types import Type
 
 
 class Builder:
diff --git a/src/tensora/iteration_graph/__init__.py b/src/tensora/iteration_graph/__init__.py
index e69de29..a6d2dca 100644
--- a/src/tensora/iteration_graph/__init__.py
+++ b/src/tensora/iteration_graph/__init__.py
@@ -0,0 +1,3 @@
+from .iteration_graph_to_c_code import generate_c_code, KernelType
+from .problem import Problem
+from .merge_lattice import Lattice, LatticeLeaf, LatticeConjuction, LatticeDisjunction, IterationMode
diff --git a/src/tensora/iteration_graph/identifiable_expression/__init__.py b/src/tensora/iteration_graph/identifiable_expression/__init__.py
index e3ac522..5a69915 100644
--- a/src/tensora/iteration_graph/identifiable_expression/__init__.py
+++ b/src/tensora/iteration_graph/identifiable_expression/__init__.py
@@ -1,2 +1,7 @@
 from .ast import Assignment, Expression, Variable
+from .exhaust_tensors import exhaust_tensor
+from .index_dimension import index_dimension
+from .index_dimensions import index_dimensions
 from .tensor_leaf import TensorLeaf
+from .to_ir import to_ir
+from .variables import to_c_code
diff --git a/src/tensora/iteration_graph/identifiable_expression/ast.py b/src/tensora/iteration_graph/identifiable_expression/ast.py
index c4a8d58..43abb42 100644
--- a/src/tensora/iteration_graph/identifiable_expression/ast.py
+++ b/src/tensora/iteration_graph/identifiable_expression/ast.py
@@ -4,8 +4,8 @@
 from dataclasses import dataclass
 from typing import Tuple
 
-from tensora import Mode
-from tensora.iteration_graph.identifiable_expression.tensor_leaf import TensorLeaf
+from ...format import Mode
+from .tensor_leaf import TensorLeaf
 
 
 class Node:
diff --git a/src/tensora/iteration_graph/identifiable_expression/exhaust_tensors.py b/src/tensora/iteration_graph/identifiable_expression/exhaust_tensors.py
index fdbf7e2..74464e1 100644
--- a/src/tensora/iteration_graph/identifiable_expression/exhaust_tensors.py
+++ b/src/tensora/iteration_graph/identifiable_expression/exhaust_tensors.py
@@ -1,8 +1,7 @@
 from functools import singledispatch
 
 from .ast import *
-
-from tensora.iteration_graph.identifiable_expression import TensorLeaf
+from .tensor_leaf import TensorLeaf
 
 
 @singledispatch
diff --git a/src/tensora/iteration_graph/iteration_graph_to_c_code.py b/src/tensora/iteration_graph/iteration_graph_to_c_code.py
index 4f249e3..801c881 100644
--- a/src/tensora/iteration_graph/iteration_graph_to_c_code.py
+++ b/src/tensora/iteration_graph/iteration_graph_to_c_code.py
@@ -7,7 +7,7 @@
 from typing import List, Optional, Set, Tuple
 
 from .identifiable_expression import ast as ie_ast
-from .identifiable_expression.to_ir import to_ir
+from .identifiable_expression import to_ir
 from .iteration_graph import IterationGraph, IterationVariable, TerminalExpression, Add as GraphAdd
 from .merge_lattice import LatticeLeaf
 from .names import dimension_name, pos_name, crd_name, vals_name, crd_capacity_name, pos_capacity_name, \
diff --git a/src/tensora/iteration_graph/merge_lattice/__init__.py b/src/tensora/iteration_graph/merge_lattice/__init__.py
index fea67be..7b7f026 100644
--- a/src/tensora/iteration_graph/merge_lattice/__init__.py
+++ b/src/tensora/iteration_graph/merge_lattice/__init__.py
@@ -1 +1 @@
-from .merge_lattice import *
+from .merge_lattice import Lattice, LatticeLeaf, LatticeConjuction, LatticeDisjunction, IterationMode
diff --git a/src/tensora/iteration_graph/problem.py b/src/tensora/iteration_graph/problem.py
index 248783e..ebc29a0 100644
--- a/src/tensora/iteration_graph/problem.py
+++ b/src/tensora/iteration_graph/problem.py
@@ -1,10 +1,10 @@
+__all__ = ["Problem"]
+
 from dataclasses import dataclass
 from typing import Dict
 
-from tensora import Format
-from tensora.iteration_graph.identifiable_expression import Assignment
-from tensora.iteration_graph.identifiable_expression.index_dimension import index_dimension
-from tensora.iteration_graph.identifiable_expression.index_dimensions import index_dimensions
+from ..format import Format
+from .identifiable_expression import Assignment, index_dimension, index_dimensions
 
 
 @dataclass(frozen=True)
diff --git a/src/tensora/iteration_graph/problem_to_iteration_graph.py b/src/tensora/iteration_graph/problem_to_iteration_graph.py
deleted file mode 100644
index ad2183a..0000000
--- a/src/tensora/iteration_graph/problem_to_iteration_graph.py
+++ /dev/null
@@ -1,52 +0,0 @@
-from itertools import product, permutations, chain
-from typing import Callable
-
-from .iteration_graph import IterationVariable, TerminalExpression, IterationGraph
-from .merge_lattice import Lattice
-from ..format import Format, Mode
-from ..expression.ast import Variable, Expression
-from ..problem.problem import Problem
-
-
-def generate_iteration_graphs_from_tensor(variable: Variable, format: Format):
-    # Yield every legal iteration graph for this variable for the given format
-    # Legal iteration graphs always iterate each dimension in order except adjacent dense dimensions, which may be
-    # iterated in any order.
-
-    # The index variables in mode order
-    ordered_indexes = [variable.indexes[dimension] for dimension in format.ordering]
-
-    # The dimensions that can be iterated over
-    lattices = [Lattice(variable.name, i_layer, mode) for i_layer, mode in enumerate(format.modes)]
-
-    # Indexes that can be permuted form a cluster. Dense modes while they exist will append to the cluster of previous
-    # dense modes, otherwise the mode forms a new length-1 cluster.
-    clusters = [[]]
-    previous_mode_dense = True
-    for index_variable, layer in zip(ordered_indexes, lattices):
-        if layer.mode == Mode.dense:
-            if previous_mode_dense:
-                clusters[-1].append((index_variable, layer))
-            else:
-                clusters.append([(index_variable, layer)])
-            previous_mode_dense = True
-        elif layer.mode == Mode.compressed:
-            clusters.append([(index_variable, layer)])
-            previous_mode_dense = False
-
-    for nested_list in product(*(permutations(cluster) for cluster in clusters)):
-        flattened_list = list(chain.from_iterable(nested_list))
-        graph = TerminalExpression(variable)
-        for index_variable, layer in flattened_list:
-            graph = IterationVariable(index_variable, layer, graph)
-
-        yield graph
-
-
-def conjunctive_merge(left: IterationGraph, right: IterationGraph,
-                      operation: Callable[[Expression, Expression], Expression]):
-    pass
-
-def problem_to_iteration_graph(problem: Problem):
-    tensors = []
-    1
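The deleted generate_iteration_graphs_from_tensor above enumerated iteration orders by grouping adjacent dense modes into clusters and permuting only within each cluster. A standalone sketch of that clustering idea, using an illustrative mode-string encoding ('d' dense, 's' compressed) rather than tensora's Format/Mode objects:

from itertools import chain, permutations, product


def orderings(index_variables, modes):
    # Adjacent dense ('d') modes share a cluster; each compressed ('s') mode forms
    # its own cluster, because sparse levels must be iterated in storage order.
    clusters, previous_dense = [[]], True
    for index, mode in zip(index_variables, modes):
        if mode == "d" and previous_dense:
            clusters[-1].append(index)
        else:
            clusters.append([index])
        previous_dense = mode == "d"
    # Permute within clusters only; the clusters themselves keep their order.
    for nested in product(*(permutations(cluster) for cluster in clusters)):
        yield list(chain.from_iterable(nested))


# For 'ddss', i and j may swap, while k and l stay fixed.
assert list(orderings("ijkl", "ddss")) == [["i", "j", "k", "l"], ["j", "i", "k", "l"]]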
diff --git a/src/tensora/native.py b/src/tensora/native.py
new file mode 100644
index 0000000..8f3b379
--- /dev/null
+++ b/src/tensora/native.py
@@ -0,0 +1,26 @@
+__all__ = ["generate_code", "KernelType"]
+
+from typing import Dict
+from tensora.expression import parse_assignment
+from tensora.format import parse_format
+from tensora.desugar import desugar_assignment, to_identifiable, to_iteration_graph
+from tensora.iteration_graph import KernelType, generate_c_code, Problem
+from tensora.codegen import ast_to_c
+from tensora.ir import peephole
+
+
+def generate_code(assignment: str, output_format: str, input_formats: Dict[str, str], kernel_type: KernelType) -> str:
+    assignment_parsed = parse_assignment(assignment).unwrap()
+    input_formats_parsed = {name: parse_format(format).unwrap() for name, format in input_formats.items()}
+    output_format_parsed = parse_format(output_format).unwrap()
+
+    desugar = desugar_assignment(assignment_parsed)
+
+    identifiable_assignment = to_identifiable(desugar, input_formats_parsed, output_format_parsed)
+
+    graph = to_iteration_graph(desugar, input_formats_parsed, output_format_parsed)
+    problem = Problem(identifiable_assignment, input_formats_parsed, output_format_parsed)
+
+    ir = generate_c_code(problem, graph, kernel_type).finalize()
+
+    return ast_to_c(peephole(ir))
diff --git a/src/tensora/native_generator.py b/src/tensora/native_generator.py
deleted file mode 100644
index 38a0cf4..0000000
--- a/src/tensora/native_generator.py
+++ /dev/null
@@ -1,30 +0,0 @@
-__all__ =["assignment_to_c_code"]
-
-from typing import Dict
-from tensora.codegen.ast_to_c import ast_to_c
-from tensora.desugar.to_identifiable import to_identifiable
-from tensora.desugar.desugar_expression import desugar_assignment
-from tensora.desugar.to_iteration_graph import to_iteration_graph
-from tensora.expression.parser import parse_assignment
-
-from tensora.format.parser import parse_format
-from tensora.ir.peephole import peephole
-from tensora.iteration_graph.iteration_graph_to_c_code import KernelType, generate_c_code
-from tensora.iteration_graph.problem import Problem
-
-
-def assignment_to_c_code(string: str, input_formats: Dict[str, str], output_format: str, kernel_type: KernelType) -> str:
-    assignment = parse_assignment(string).unwrap()
-    input_formats_parsed = {name: parse_format(format).unwrap() for name, format in input_formats.items()}
-    output_format_parsed = parse_format(output_format).unwrap()
-
-    desugar = desugar_assignment(assignment)
-
-    identifiable_assignment = to_identifiable(desugar, input_formats_parsed, output_format_parsed)
-
-    graph = to_iteration_graph(desugar, input_formats_parsed, output_format_parsed)
-    problem = Problem(identifiable_assignment, input_formats_parsed, output_format_parsed)
-
-    ir = generate_c_code(problem, graph, kernel_type).finalize()
-
-    return ast_to_c(peephole(ir))
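A usage sketch of the new public entry point (compare tests/test_native.py below). Note the argument order: generate_code takes output_format before input_formats, whereas the deleted assignment_to_c_code took input_formats first. The assignment and format strings here are illustrative:

from tensora.native import generate_code, KernelType

# Sparse matrix-vector product: dense output, CSR-like matrix ('ds'), dense vector.
code = generate_code(
    "y(i) = A(i,j) * x(j)",
    "d",
    {"A": "ds", "x": "d"},
    KernelType.compute,
)
print(code)  # the generated C kernel as a string; compiling it is up to the caller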
diff --git a/tests/codegen/test_ast_to_c.py b/tests/codegen/test_ast_to_c.py
index 7f7075c..3f09cca 100644
--- a/tests/codegen/test_ast_to_c.py
+++ b/tests/codegen/test_ast_to_c.py
@@ -80,12 +80,12 @@ def clean(string: str) -> str:
     (Declaration(Variable("x"), float), "double x"),
     (Declaration(Variable("x"), tensor), "taco_tensor_t x"),
     (Declaration(Variable("x"), hash_table), "hash_table_t x"),
-    (Declaration(Variable("x"), Pointer(float)), "double * restrict x"),
-    (Declaration(Variable("x"), Pointer(Pointer(integer))), "int32_t * restrict * restrict x"),
-    (Declaration(Variable("x"), Array(float)), "double[] x"),
-    (Declaration(Variable("x"), Array(Array(integer))), "int32_t[][] x"),
-    (Declaration(Variable("x"), FixedArray(mode, 3)), "taco_mode_t[3] x"),
-    (Declaration(Variable("x"), FixedArray(FixedArray(mode, 3), 2)), "taco_mode_t[3][2] x"),
+    (Declaration(Variable("x"), Pointer(float)), "double* restrict x"),
+    (Declaration(Variable("x"), Pointer(Pointer(integer))), "int32_t* restrict* restrict x"),
+    (Declaration(Variable("x"), Array(float)), "double x[]"),
+    (Declaration(Variable("x"), Array(Array(integer))), "int32_t x[][]"),
+    (Declaration(Variable("x"), FixedArray(mode, 3)), "taco_mode_t x[3]"),
+    (Declaration(Variable("x"), FixedArray(FixedArray(mode, 3), 2)), "taco_mode_t x[3][2]"),
     # Assignment
     (Assignment(Variable("x"), Variable("y")), "x = y"),
     (Assignment(Variable("x"), Add(Variable("x"), IntegerLiteral(1))), "x++"),
diff --git a/tests/test_combinatorically.py b/tests/test_combinatorically.py
index 8aa8953..362b3f3 100644
--- a/tests/test_combinatorically.py
+++ b/tests/test_combinatorically.py
@@ -88,11 +88,3 @@ def test_matrix_multiply_add(dense1, dense2, dense3, format1, format2, format3,
                          in1=(dense1, format1),
                          in2=(dense2, format2),
                          in3=(dense3, format3))
-
-
-@pytest.mark.parametrize('dense1', [[[[0, 2, 4], [0, -1, 0]], [[0, 0, 0], [0, 0, 0]]], [[[0, 2, 4], [0, -1, 0]], [[0, 0, 0], [0, 0, 0]]]])
-@pytest.mark.parametrize('format1', ['sss'])
-@pytest.mark.parametrize('format_out', ['ss'])
-def test_inner_contract(dense1, format1, format_out):
-    assert_same_as_dense('out(i,k) = in1(i,j,k) * in2(i,j,k)', format_out,
-                         in1=(dense1, format1),
-                         in2=(dense1, format1))
diff --git a/tests/test_desugar.py b/tests/test_desugar.py
index 6bfccb7..1b444ca 100644
--- a/tests/test_desugar.py
+++ b/tests/test_desugar.py
@@ -1,9 +1,8 @@
 import pytest
 
-from tensora.desugar.desugar_expression import desugar_assignment
+from tensora.desugar import desugar_assignment, Id
 from tensora.expression import ast as sugar
 from tensora.desugar import ast as desugar
-from tensora.desugar.id import Id
 
 
 @pytest.mark.parametrize(
diff --git a/tests/test_native.py b/tests/test_native.py
new file mode 100644
index 0000000..5f74a36
--- /dev/null
+++ b/tests/test_native.py
@@ -0,0 +1,10 @@
+from tensora.native import generate_code, KernelType
+
+
+def test_native_codegen():
+    assignment = "f(i) = A0(i) + A1(i,j) * x(j) + A2(i,k,l) * x(k) * x(l)"
+    output_format = "d"
+    input_formats = {"A0": "d", "A1": "ds", "A2": "dss", "x": "d"}
+    kernel_type = KernelType.compute
+    code = generate_code(assignment, output_format, input_formats, kernel_type)
+    assert isinstance(code, str)
diff --git a/tests/test_temp.py b/tests/test_temp.py
deleted file mode 100644
index 4d13caa..0000000
--- a/tests/test_temp.py
+++ /dev/null
@@ -1,433 +0,0 @@
-from tensora import Mode, Format
-from tensora.codegen.ast_to_c import ast_to_c
-from tensora.desugar.to_identifiable import to_identifiable
-from tensora.desugar.to_iteration_graph import to_iteration_graph
-from tensora.desugar.desugar_expression import desugar_assignment
-from tensora.expression.parser import parse_assignment
-from tensora.format import parse_format
-from tensora.ir.peephole import peephole
-from tensora.iteration_graph.iteration_graph import *
-from tensora.iteration_graph.merge_lattice import *
-from tensora.iteration_graph.identifiable_expression import ast
-from tensora.iteration_graph.iteration_graph_to_c_code import iteration_graph_to_c_code, AppendOutput, generate_c_code, \
-    KernelType
-
-# def test_temp():
-#     method = tensor_method('A(i,j) = B(i,k) * C(k,j)', dict(B='ds', C='ds'), 'dd')
-#
-from tensora.iteration_graph.problem import Problem
-from tensora.native_generator import assignment_to_c_code
-
-
-def pf(text: str):
-    return parse_format(text).unwrap()
-
-
-def test_native():
-    string = "f(i) = A0(i) + A1(i,j) * x(j) + A2(i,k,l) * x(k) * x(l)"
-    input_formats = {"f": pf("d"), "A0": pf("d"), "A1": pf("ds"), "A2": pf("dss"), "x": pf("d")}
-    output_format = pf("d")
-    print(string)
-    assignment = parse_assignment(string).unwrap()
-    print(repr(assignment))
-    desugar = desugar_assignment(assignment)
-    print(desugar)
-    identifiable_assignment = to_identifiable(desugar, input_formats)
-    graph = to_iteration_graph(desugar, input_formats)
-    print(graph)
-    problem = Problem(identifiable_assignment, input_formats, output_format)
-    ir = generate_c_code(problem, graph, KernelType.compute).finalize()
-    print(ir)
-    code = ast_to_c(peephole(ir))
-    print(code)
-
-
-def test_native_codegen():
-    assignment = "f(i) = A0(i) + A1(i,j) * x(j) + A2(i,k,l) * x(k) * x(l)"
-    input_formats = {"A0": "d", "A1": "ds", "A2": "dss", "x": "d"}
-    output_format = "d"
-    kernel_type = KernelType.compute
-    print(assignment_to_c_code(assignment, input_formats, output_format, kernel_type))
-
-
-# def test_code():
-#     assignment = parse_assignment('a(i,j) = b(i,j) * c(i,j)').or_die()
-#     code = generate_c_code(assignment, dict(b=pf('ss'), c=pf('ss')), pf('ss'))
-#     print(code)
-
-
-# def test_iteration():
-#     assignment = parse_assignment('a(i,j) = b(i,j) * c(i,j)').or_die()
-#     generate_iteration_graphs(assignment, dict(b=pf('ss'), c=pf('ss')), pf('ss'))
-
-
-# def test_temp():
-#     from tensora import Tensor
-#     a = Tensor.from_lol([[0,1,2],[0,0,3]], format='ss')
-#     b = Tensor.from_lol([[1,0,0],[0,2,0]], format='ss')
-#     from tensora import evaluate
-#     c = evaluate('out = a * b', 'ss', a=a, b=b)
-
-
-def test_multiply_matrix():
-    # a(i,j) = b(i,j) * c(i,j); a=ss, b=ss, c=ss
-    a = ast.Tensor(TensorLeaf('a', 0), ('i', 'j'), (Mode.compressed, Mode.compressed))
-    b = ast.Tensor(TensorLeaf('b', 0), ('i', 'j'), (Mode.compressed, Mode.compressed))
-    c = ast.Tensor(TensorLeaf('c', 0), ('i', 'j'), (Mode.compressed, Mode.compressed))
-
-    expression = ast.Multiply(b, c)
-    assignment = ast.Assignment(a, expression)
-
-    format = Format((Mode.compressed, Mode.compressed), (0, 1))
-    problem = Problem(assignment, {'b': format, 'c': format}, format)
-
-    algo = IterationVariable(
-        index_variable='i',
-        output=LatticeLeaf(a, 0),
-        lattice=LatticeDisjunction(
-            LatticeLeaf(b, 0),
-            LatticeLeaf(c, 0),
-        ),
-        next=IterationVariable(
-            index_variable='j',
-            output=LatticeLeaf(a, 1),
-            lattice=LatticeDisjunction(
-                LatticeLeaf(b, 1),
-                LatticeLeaf(c, 1),
-            ),
-            next=TerminalExpression(expression),
-        )
-    )
-
-    print(ast_to_c(peephole(generate_c_code(problem, algo, KernelType.evaluate).finalize())))
-
-
-def test_matrix_vector_product():
-    # a(i) = b(i,j) * c(j); a=d, b=ds, c=d
-    a = ast.Tensor(TensorLeaf('a', 0), ('i',), (Mode.dense,))
-    b = ast.Tensor(TensorLeaf('b', 0), ('i', 'j'), (Mode.dense, Mode.compressed))
-    c = ast.Tensor(TensorLeaf('c', 0), ('j',), (Mode.dense,))
-
-    expression = ast.Multiply(b, c)
-    assignment = ast.Assignment(a, expression)
-
-    format = Format(a.modes, (0,))
-    problem = Problem(assignment, {'b': format, 'c': Format(b.modes, (0, 1))}, format)
-
-    algo = IterationVariable(
-        index_variable='i',
-        output=LatticeLeaf(a, 0),
-        lattice=LatticeLeaf(b, 0),
-        next=IterationVariable(
-            index_variable='j',
-            output=None,
-            lattice=LatticeDisjunction(
-                LatticeLeaf(b, 1),
-                LatticeLeaf(c, 0),
-            ),
-            next=TerminalExpression(expression),
-        )
-    )
-
-    print(ast_to_c(peephole(generate_c_code(problem, algo, KernelType.evaluate).finalize())))
-
-
-def test_add_dense_vector():
-    # a(i) = b(i) + c(i); a=d, b=d, c=d
-    a = ast.Tensor(TensorLeaf('a', 0), ('i',), (Mode.dense,))
-    b = ast.Tensor(TensorLeaf('b', 0), ('i',), (Mode.dense,))
-    c = ast.Tensor(TensorLeaf('c', 0), ('i',), (Mode.dense,))
-
-    expression = ast.Add(b, c)
-    assignment = ast.Assignment(a, expression)
-
-    format = Format((Mode.dense,), (0,))
-    problem = Problem(assignment, {'b': format, 'c': format}, format)
-
-    algo = IterationVariable(
-        index_variable='i',
-        output=LatticeLeaf(a, 0),
-        lattice=LatticeConjuction(
-            LatticeLeaf(b, 0),
-            LatticeLeaf(c, 0),
-        ),
-        next=TerminalExpression(expression),
-    )
-
-    print(ast_to_c(peephole(generate_c_code(problem, algo, KernelType.evaluate).finalize())))
-
-
-def test_add_sparse_vector():
-    # a(i) = b(i) + c(i); a=s, b=s, c=s
-    a = ast.Tensor(TensorLeaf('a', 0), ('i',), (Mode.compressed,))
-    b = ast.Tensor(TensorLeaf('b', 0), ('i',), (Mode.compressed,))
-    c = ast.Tensor(TensorLeaf('c', 0), ('i',), (Mode.compressed,))
-
-    expression = ast.Add(b, c)
-    assignment = ast.Assignment(a, expression)
-
-    format = Format((Mode.compressed,), (0,))
-    problem = Problem(assignment, {'b': format, 'c': format}, format)
-
-    algo = IterationVariable(
-        index_variable='i',
-        output=LatticeLeaf(a, 0),
-        lattice=LatticeConjuction(
-            LatticeLeaf(b, 0),
-            LatticeLeaf(c, 0),
-        ),
-        next=TerminalExpression(expression),
-    )
-
-    print(ast_to_c(peephole(generate_c_code(problem, algo, KernelType.evaluate).finalize())))
-
-
-def test_add_sparse_matrix():
-    # a(i,j) = b(i,j) + c(i,j); a=ss, b=ss, c=ss
-    a = ast.Tensor(TensorLeaf('a', 0), ('i', 'j'), (Mode.compressed, Mode.compressed))
-    b = ast.Tensor(TensorLeaf('b', 0), ('i', 'j'), (Mode.compressed, Mode.compressed))
-    c = ast.Tensor(TensorLeaf('c', 0), ('i', 'j'), (Mode.compressed, Mode.compressed))
-
-    expression = ast.Add(b, c)
-    assignment = ast.Assignment(a, expression)
-
-    format = Format((Mode.compressed, Mode.compressed), (0, 1))
-    problem = Problem(assignment, {'b': format, 'c': format}, format)
-
-    algo = IterationVariable(
-        index_variable='i',
-        output=LatticeLeaf(a, 0),
-        lattice=LatticeConjuction(
-            LatticeLeaf(b, 0),
-            LatticeLeaf(c, 0),
-        ),
-        next=IterationVariable(
-            index_variable='j',
-            output=LatticeLeaf(a, 1),
-            lattice=LatticeConjuction(
-                LatticeLeaf(b, 1),
-                LatticeLeaf(c, 1),
-            ),
-            next=TerminalExpression(expression),
-        )
-    )
-
-    print(ast_to_c(peephole(generate_c_code(problem, algo, KernelType.evaluate).finalize())))
-
-
-def test_contract():
-    # a(i) = b(i,j) + c(i,j); a=s, b=ss, c=ss
-    a = ast.Tensor(TensorLeaf('a', 0), ('i',), (Mode.compressed,))
-    b = ast.Tensor(TensorLeaf('b', 0), ('i', 'j'), (Mode.compressed, Mode.compressed))
-    c = ast.Tensor(TensorLeaf('c', 0), ('i', 'j'), (Mode.compressed, Mode.compressed))
-
-    expression = ast.Add(b, c)
-    assignment = ast.Assignment(a, expression)
-
-    format = Format((Mode.compressed, Mode.compressed), (0, 1))
-    problem = Problem(assignment, {'b': format, 'c': format}, Format((Mode.compressed,), (0,)))
-
-    algo = IterationVariable(
-        index_variable='i',
-        output=LatticeLeaf(a, 0),
-        lattice=LatticeConjuction(
-            LatticeLeaf(b, 0),
-            LatticeLeaf(c, 0),
-        ),
-        next=IterationVariable(
-            index_variable='j',
-            output=None,
-            lattice=LatticeConjuction(
-                LatticeLeaf(b, 1),
-                LatticeLeaf(c, 1),
-            ),
-            next=TerminalExpression(expression),
-        )
-    )
-
-    print(ast_to_c(peephole(generate_c_code(problem, algo, KernelType.evaluate).finalize())))
-
-
-def test_add_multiply():
-    # y(i) = A(i,j) * x(j) + b(i); y=d, A=ds, x=d, b=d
-    y = ast.Tensor(TensorLeaf('y', 0), ('i',), (Mode.dense,))
-    A = ast.Tensor(TensorLeaf('A', 0), ('i', 'j'), (Mode.dense, Mode.compressed))
-    x = ast.Tensor(TensorLeaf('x', 0), ('j',), (Mode.dense,))
-    b = ast.Tensor(TensorLeaf('b', 0), ('i',), (Mode.dense,))
-
-    expression = ast.Add(ast.Multiply(A, x), b)
-    assignment = ast.Assignment(y, expression)
-
-    format = Format(y.modes, tuple(range(len(y.modes))))
-    A_format = Format((Mode.compressed, Mode.compressed), (0, 1))
-    problem = Problem(assignment, {'A': A_format, 'x': format, 'b': format}, format)
-
-    algo = IterationVariable(
-        index_variable='i',
-        output=LatticeLeaf(y, 0),
-        lattice=LatticeConjuction(
-            LatticeLeaf(A, 0),
-            LatticeLeaf(b, 0),
-        ),
-        next=Add(
-            name='output',
-            terms=[
-                IterationVariable(
-                    index_variable='j',
-                    output=None,
-                    lattice=LatticeDisjunction(
-                        LatticeLeaf(A, 1),
-                        LatticeLeaf(x, 0),
-                    ),
-                    next=TerminalExpression(ast.Multiply(A, x)),
-                ),
-                TerminalExpression(b),
-            ]
-        )
-    )
-
-    print(ast_to_c(peephole(generate_c_code(problem, algo, KernelType.evaluate).finalize())))
-
-
-def test_rhs():
-    # f(i) = A0(i) + A1(i,j) * x(j) + A2(i,k,l)*x(k)*x(l); f=d, A0=d, A1=ds, A2=dss, x=d
-    f = ast.Tensor(TensorLeaf('f', 0), ('i',), (Mode.dense,))
-    A0 = ast.Tensor(TensorLeaf('A0', 0), ('i',), (Mode.dense,))
-    A1 = ast.Tensor(TensorLeaf('A1', 0), ('i', 'j'), (Mode.dense, Mode.compressed))
-    A2 = ast.Tensor(TensorLeaf('A2', 0), ('i', 'k', 'l'), (Mode.dense, Mode.compressed, Mode.compressed))
-    x1 = ast.Tensor(TensorLeaf('x', 1), ('j',), (Mode.dense,))
-    x2 = ast.Tensor(TensorLeaf('x', 2), ('j',), (Mode.dense,))
-    x3 = ast.Tensor(TensorLeaf('x', 3), ('j',), (Mode.dense,))
-
-    expression = ast.Add(ast.Add(A0, ast.Multiply(A1, x1)), ast.Multiply(ast.Multiply(A2, x2), x3))
-    assignment = ast.Assignment(f, expression)
-
-    format = Format(f.modes, tuple(range(len(f.modes))))
-    problem = Problem(assignment, {'A0': format, 'A1': pf('ds'), 'x': format, 'A2': pf('dss')}, format)
-
-    algo = IterationVariable(
-        index_variable='i',
-        output=LatticeLeaf(f, 0),
-        lattice=LatticeConjuction(
-            LatticeLeaf(A0, 0),
-            LatticeConjuction(
-                LatticeLeaf(A1, 0),
-                LatticeLeaf(A2, 0),
-            )
-        ),
-        next=Add(
-            name='output',
-            terms=[
-                TerminalExpression(A0),
-                IterationVariable(
-                    index_variable='j',
-                    output=None,
-                    lattice=LatticeDisjunction(
-                        LatticeLeaf(A1, 1),
-                        LatticeLeaf(x1, 0),
-                    ),
-                    next=TerminalExpression(ast.Multiply(A1, x1)),
-                ),
-                IterationVariable(
-                    index_variable='k',
-                    output=None,
-                    lattice=LatticeDisjunction(
-                        LatticeLeaf(A2, 1),
-                        LatticeLeaf(x2, 0),
-                    ),
-                    next=IterationVariable(
-                        index_variable='l',
-                        output=None,
-                        lattice=LatticeDisjunction(
-                            LatticeLeaf(A2, 2),
-                            LatticeLeaf(x3, 0),
-                        ),
-                        next=TerminalExpression(ast.Multiply(ast.Multiply(A2, x2), x3)),
-                    ),
-                ),
-            ]
-        )
-    )
-
-    print(ast_to_c(peephole(generate_c_code(problem, algo, KernelType.compute).finalize())))
-
-
-def test_hash():
-    # a(i,k) = b(i,j) * c(j,k); a=ds, b=ds, c=ds
-    a = ast.Tensor(TensorLeaf('a', 0), ('i', 'k'), (Mode.dense, Mode.compressed))
-    b = ast.Tensor(TensorLeaf('b', 0), ('i', 'j'), (Mode.dense, Mode.compressed))
-    c = ast.Tensor(TensorLeaf('c', 0), ('j', 'k'), (Mode.dense, Mode.compressed))
-
-    expression = ast.Multiply(b, c)
-    assignment = ast.Assignment(a, expression)
-
-    format = Format((Mode.dense, Mode.compressed), (0, 1))
-    problem = Problem(assignment, {'b': format, 'c': format}, format)
-
-    algo = IterationVariable(
-        index_variable='i',
-        output=LatticeLeaf(a, 0),
-        lattice=LatticeLeaf(b, 0),
-        next=IterationVariable(
-            index_variable='j',
-            output=None,
-            lattice=LatticeDisjunction(
-                LatticeLeaf(b, 1),
-                LatticeLeaf(c, 0),
-            ),
-            next=IterationVariable(
-                index_variable='k',
-                output=LatticeLeaf(a, 1),
-                lattice=LatticeLeaf(c, 1),
-                next=TerminalExpression(expression),
-            ),
-        )
-    )
-
-    print(ast_to_c(peephole(generate_c_code(problem, algo, KernelType.compute).finalize())))
-
-
-# def test_dense():
-#     # a(i,j) = b(i,j) + c(i,j); a=ss, b=dd, c=dd
-#     algo = IterationVariable(
-#         index_variable='i',
-#         output_tensor='a',
-#         lattice=LatticeConjuction(
-#             LatticeLeaf(TensorLeaf('b', 0), 0, Mode.dense),
-#             LatticeLeaf(TensorLeaf('c', 0), 0, Mode.dense),
-#         ),
-#         next=IterationVariable(
-#             index_variable='j',
-#             output_tensor='a',
-#             lattice=LatticeConjuction(
-#                 LatticeLeaf(TensorLeaf('b', 0), 1, Mode.dense),
-#                 LatticeLeaf(TensorLeaf('c', 0), 1, Mode.dense),
-#             ),
-#             next=TerminalExpression(ast.Add(
-#                 ast.Tensor(TensorLeaf('b', 0), ['i', 'j']),
-#                 ast.Tensor(TensorLeaf('c', 0), ['i', 'j']),
-#             )),
-#         )
-#     )
-#
-#     print(iteration_graph_to_c_code(algo, output=Compressed('a', 1)).source())
-
-
-# def test_csr():
-#     # a(i,j) = b(i,j) + c(i,j); a=ds, b=ds, c=ds
-#     algo = IterationVariable(
-#         index_variable='i',
-#         output=LatticeLeaf(TensorLeaf('b', 0), 1, Mode.dense),
-#         lattice=LatticeConjuction(
-#             LatticeLeaf(TensorLeaf('b', 0), 0, Mode.dense),
-#             LatticeLeaf(TensorLeaf('c', 0), 0, Mode.dense),
-#         ),
-#         next=IterationVariable(
-#             index_variable='j',
-#             output=LatticeLeaf(TensorLeaf('b', 0), 1, Mode.compressed),
-#             lattice=LatticeConjuction(
-#                 LatticeLeaf(TensorLeaf('b', 0), 1, Mode.compressed),
-#                 LatticeLeaf(TensorLeaf('c', 0), 1, Mode.compressed),
-#             ),
-#             next=TerminalExpression(ast.Add(
-#                 ast.Tensor(TensorLeaf('b', 0), ['i', 'j']),
-#                 ast.Tensor(TensorLeaf('c', 0), ['i', 'j']),
-#             )),
-#         )
-#     )
-#
-#     print(iteration_graph_to_c_code(algo, output=Compressed('a', 1)).source())
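Conceptually, the kernels that the deleted sparse-addition tests above generated co-iterate two compressed levels with a two-pointer merge. A plain-Python sketch of that pattern on pos/crd/vals arrays; this illustrates the idea only and is not the C that generate_c_code emits:

def add_sparse_vectors(b_pos, b_crd, b_vals, c_pos, c_crd, c_vals):
    # Merge two compressed vectors stored as (pos, crd, vals) arrays: walk both
    # coordinate lists, emitting the smaller coordinate and summing the values
    # when the coordinates match.
    a_crd, a_vals = [], []
    pb, pc = b_pos[0], c_pos[0]
    while pb < b_pos[1] and pc < c_pos[1]:
        i = min(b_crd[pb], c_crd[pc])
        value = 0.0
        if b_crd[pb] == i:
            value += b_vals[pb]
            pb += 1
        if c_crd[pc] == i:
            value += c_vals[pc]
            pc += 1
        a_crd.append(i)
        a_vals.append(value)
    # Copy the tail of whichever operand is not yet exhausted.
    while pb < b_pos[1]:
        a_crd.append(b_crd[pb])
        a_vals.append(b_vals[pb])
        pb += 1
    while pc < c_pos[1]:
        a_crd.append(c_crd[pc])
        a_vals.append(c_vals[pc])
        pc += 1
    return [0, len(a_crd)], a_crd, a_vals


# b has 1.0 at i=1 and 2.0 at i=3; c has 5.0 at i=0 and 7.0 at i=3.
assert add_sparse_vectors([0, 2], [1, 3], [1.0, 2.0], [0, 2], [0, 3], [5.0, 7.0]) == (
    [0, 3],
    [0, 1, 3],
    [5.0, 1.0, 9.0],
)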