[pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
pre-commit-ci[bot] committed Sep 13, 2023
1 parent aa88491 commit cf89509
Showing 1 changed file with 71 additions and 16 deletions.
87 changes: 71 additions & 16 deletions python/tests/__snapshots__/test_array_api/test_sklearn_lda.py
@@ -9,31 +9,74 @@
assume_value_one_of(_NDArray_2, _TupleValue_1)
_NDArray_3 = reshape(_NDArray_2, TupleInt(Int(-1)))
_NDArray_4 = astype(unique_counts(_NDArray_3)[Int(1)], DType.float64) / NDArray.scalar(Value.float(Float(150.0)))
- _NDArray_5 = zeros(TupleInt(Int(3)) + TupleInt(Int(4)), OptionalDType.some(DType.float64), OptionalDevice.some(_NDArray_1.device))
+ _NDArray_5 = zeros(
+ TupleInt(Int(3)) + TupleInt(Int(4)), OptionalDType.some(DType.float64), OptionalDevice.some(_NDArray_1.device)
+ )
_MultiAxisIndexKey_1 = MultiAxisIndexKey(MultiAxisIndexKeyItem.slice(Slice()))
_IndexKey_1 = IndexKey.multi_axis(MultiAxisIndexKey(MultiAxisIndexKeyItem.int(Int(0))) + _MultiAxisIndexKey_1)
- _NDArray_5[_IndexKey_1] = mean(_NDArray_1[ndarray_index(unique_inverse(_NDArray_3)[Int(1)] == NDArray.scalar(Value.int(Int(0))))], OptionalIntOrTuple.int(Int(0)))
+ _NDArray_5[_IndexKey_1] = mean(
+ _NDArray_1[ndarray_index(unique_inverse(_NDArray_3)[Int(1)] == NDArray.scalar(Value.int(Int(0))))],
+ OptionalIntOrTuple.int(Int(0)),
+ )
_IndexKey_2 = IndexKey.multi_axis(MultiAxisIndexKey(MultiAxisIndexKeyItem.int(Int(1))) + _MultiAxisIndexKey_1)
- _NDArray_5[_IndexKey_2] = mean(_NDArray_1[ndarray_index(unique_inverse(_NDArray_3)[Int(1)] == NDArray.scalar(Value.int(Int(1))))], OptionalIntOrTuple.int(Int(0)))
+ _NDArray_5[_IndexKey_2] = mean(
+ _NDArray_1[ndarray_index(unique_inverse(_NDArray_3)[Int(1)] == NDArray.scalar(Value.int(Int(1))))],
+ OptionalIntOrTuple.int(Int(0)),
+ )
_IndexKey_3 = IndexKey.multi_axis(MultiAxisIndexKey(MultiAxisIndexKeyItem.int(Int(2))) + _MultiAxisIndexKey_1)
- _NDArray_5[_IndexKey_3] = mean(_NDArray_1[ndarray_index(unique_inverse(_NDArray_3)[Int(1)] == NDArray.scalar(Value.int(Int(2))))], OptionalIntOrTuple.int(Int(0)))
+ _NDArray_5[_IndexKey_3] = mean(
+ _NDArray_1[ndarray_index(unique_inverse(_NDArray_3)[Int(1)] == NDArray.scalar(Value.int(Int(2))))],
+ OptionalIntOrTuple.int(Int(0)),
+ )
_NDArray_6 = concat(
- TupleNDArray(_NDArray_1[ndarray_index(_NDArray_3 == NDArray.vector(_TupleValue_1)[IndexKey.int(Int(0))])] - _NDArray_5[_IndexKey_1])
+ TupleNDArray(
+ _NDArray_1[ndarray_index(_NDArray_3 == NDArray.vector(_TupleValue_1)[IndexKey.int(Int(0))])]
+ - _NDArray_5[_IndexKey_1]
+ )
+ (
- TupleNDArray(_NDArray_1[ndarray_index(_NDArray_3 == NDArray.vector(_TupleValue_1)[IndexKey.int(Int(1))])] - _NDArray_5[_IndexKey_2])
- + TupleNDArray(_NDArray_1[ndarray_index(_NDArray_3 == NDArray.vector(_TupleValue_1)[IndexKey.int(Int(2))])] - _NDArray_5[_IndexKey_3])
+ TupleNDArray(
+ _NDArray_1[ndarray_index(_NDArray_3 == NDArray.vector(_TupleValue_1)[IndexKey.int(Int(1))])]
+ - _NDArray_5[_IndexKey_2]
+ )
+ + TupleNDArray(
+ _NDArray_1[ndarray_index(_NDArray_3 == NDArray.vector(_TupleValue_1)[IndexKey.int(Int(2))])]
+ - _NDArray_5[_IndexKey_3]
+ )
),
OptionalInt.some(Int(0)),
)
_NDArray_7 = std(_NDArray_6, OptionalIntOrTuple.int(Int(0)))
- _NDArray_7[ndarray_index(std(_NDArray_6, OptionalIntOrTuple.int(Int(0))) == NDArray.scalar(Value.int(Int(0))))] = NDArray.scalar(Value.float(Float(1.0)))
- _TupleNDArray_1 = svd(sqrt(NDArray.scalar(Value.int(NDArray.scalar(Value.float(Float(1.0))).to_int() / Int(147)))) * (_NDArray_6 / _NDArray_7), FALSE)
- _Slice_1 = Slice(OptionalInt.none, OptionalInt.some(astype(sum(_TupleNDArray_1[Int(1)] > NDArray.scalar(Value.float(Float(0.0001)))), DType.int32).to_int()))
- _NDArray_8 = (_TupleNDArray_1[Int(2)][IndexKey.multi_axis(MultiAxisIndexKey(MultiAxisIndexKeyItem.slice(_Slice_1)) + _MultiAxisIndexKey_1)] / _NDArray_7).T / _TupleNDArray_1[
- Int(1)
- ][IndexKey.slice(_Slice_1)]
+ _NDArray_7[
+ ndarray_index(std(_NDArray_6, OptionalIntOrTuple.int(Int(0))) == NDArray.scalar(Value.int(Int(0))))
+ ] = NDArray.scalar(Value.float(Float(1.0)))
+ _TupleNDArray_1 = svd(
+ sqrt(NDArray.scalar(Value.int(NDArray.scalar(Value.float(Float(1.0))).to_int() / Int(147))))
+ * (_NDArray_6 / _NDArray_7),
+ FALSE,
+ )
+ _Slice_1 = Slice(
+ OptionalInt.none,
+ OptionalInt.some(
+ astype(sum(_TupleNDArray_1[Int(1)] > NDArray.scalar(Value.float(Float(0.0001)))), DType.int32).to_int()
+ ),
+ )
+ _NDArray_8 = (
+ _TupleNDArray_1[Int(2)][
+ IndexKey.multi_axis(MultiAxisIndexKey(MultiAxisIndexKeyItem.slice(_Slice_1)) + _MultiAxisIndexKey_1)
+ ]
+ / _NDArray_7
+ ).T / _TupleNDArray_1[Int(1)][IndexKey.slice(_Slice_1)]
_TupleNDArray_2 = svd(
- (sqrt(NDArray.scalar(Value.int((Int(150) * _NDArray_4.to_int()) * (NDArray.scalar(Value.float(Float(1.0))).to_int() / Int(2))))) * (_NDArray_5 - (_NDArray_4 @ _NDArray_5)).T).T
+ (
+ sqrt(
+ NDArray.scalar(
+ Value.int(
+ (Int(150) * _NDArray_4.to_int()) * (NDArray.scalar(Value.float(Float(1.0))).to_int() / Int(2))
+ )
+ )
+ )
+ * (_NDArray_5 - (_NDArray_4 @ _NDArray_5)).T
+ ).T
@ _NDArray_8,
FALSE,
)
@@ -50,7 +93,14 @@
OptionalInt.none,
OptionalInt.some(
astype(
- sum(_TupleNDArray_2[Int(1)] > (NDArray.scalar(Value.float(Float(0.0001))) * _TupleNDArray_2[Int(1)][IndexKey.int(Int(0))])), DType.int32
+ sum(
+ _TupleNDArray_2[Int(1)]
+ > (
+ NDArray.scalar(Value.float(Float(0.0001)))
+ * _TupleNDArray_2[Int(1)][IndexKey.int(Int(0))]
+ )
+ ),
+ DType.int32,
).to_int()
),
)
@@ -59,4 +109,9 @@
)
]
)
- )[IndexKey.multi_axis(_MultiAxisIndexKey_1 + MultiAxisIndexKey(MultiAxisIndexKeyItem.slice(Slice(OptionalInt.none, OptionalInt.some(Int(2))))))]
+ )[
+ IndexKey.multi_axis(
+ _MultiAxisIndexKey_1
+ + MultiAxisIndexKey(MultiAxisIndexKeyItem.slice(Slice(OptionalInt.none, OptionalInt.some(Int(2)))))
+ )
+ ]
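
Note: the snapshot above appears to be generated array-API code, so this formatting-only commit does not change its behavior. As a point of reference, a minimal sketch of the computation the snapshot seems to trace (an assumption based on the test name and the 150-sample, 4-feature, 3-class shapes in the code, not something stated in this diff) is scikit-learn's SVD-solver LDA reduced to two components:

from sklearn import datasets
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis

# Assumed input: the iris dataset (150 samples, 4 features, 3 classes),
# matching Float(150.0) and TupleInt(Int(3)) + TupleInt(Int(4)) above.
X, y = datasets.load_iris(return_X_y=True)

# solver="svd" follows roughly the same steps as the snapshot: class priors and
# means, within-class standardization, an SVD of the scaled centered data, then
# an SVD of the projected between-class matrix.
lda = LinearDiscriminantAnalysis(n_components=2, solver="svd")
X_2d = lda.fit(X, y).transform(X)  # shape (150, 2), as in the final [:, :2] slice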
