Skip to content

Commit

Permalink
Merge pull request #156 from evanhaldane/toArray
Browse files Browse the repository at this point in the history
readArray methods
  • Loading branch information
Atry authored May 25, 2018
2 parents 6a48e9e + 511bbc5 commit d38b12e
Show file tree
Hide file tree
Showing 3 changed files with 266 additions and 3 deletions.
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -110,7 +110,7 @@ So `my2DArray` is a 2D array of 2x3 size.
Note that a `Tensor` can be a zero dimensional array, which is simply a scalar value.

```
val scalar = Tensor(42.f)
val scalar = Tensor(42.0f)
println(scalar.shape.length) // 0
```

Expand Down
132 changes: 130 additions & 2 deletions Tensors/src/main/scala/com/thoughtworks/compute/Tensors.scala
Original file line number Diff line number Diff line change
Expand Up @@ -1092,7 +1092,7 @@ trait Tensors extends OpenCL {
private[compute] def doBuffer: Do[PendingBuffer[closure.JvmValue]]

/** Returns a RAII managed asynchronous task to read this [[Tensor]] into an off-heap memory,
* which is linearized in row-majoy order.
* which is linearized in row-major order.
*
* @group slow
*/
Expand All @@ -1109,14 +1109,142 @@ trait Tensors extends OpenCL {
}

/** Returns an asynchronous task to read this [[Tensor]] into a [[scala.Array]],
* which is linearized in row-majoy order.
* which is linearized in row-major order.
*
* @group slow
*/
def flatArray: Future[Array[closure.JvmValue]] = {
flatBuffer.intransitiveMap(closure.valueType.memory.toArray).run
}

// Convert flat arrays into multidimensional ones
/** Reshapes a row-major `flat` array into a `shape(0)` x `shape(1)` nested array. */
private[Tensors] def make2DArray[A: ClassTag](flat: Array[A], shape: Array[Int]): Array[Array[A]] = {
  val columns = shape(1)
  Array.tabulate(shape(0), columns) { (row, column) =>
    flat(row * columns + column)
  }
}

/** Reshapes a row-major `flat` array into a `shape(0)` x `shape(1)` x `shape(2)` nested array. */
private[Tensors] def make3DArray[A: ClassTag](flat: Array[A], shape: Array[Int]): Array[Array[Array[A]]] = {
  val (d1, d2) = (shape(1), shape(2))
  Array.tabulate(shape(0), d1, d2) { (i0, i1, i2) =>
    // (i0 * d1 + i1) * d2 + i2  ==  i2 + i1*d2 + i0*d1*d2 (row-major offset)
    flat((i0 * d1 + i1) * d2 + i2)
  }
}

/** Reshapes a row-major `flat` array into a 4-dimensional nested array of the given `shape`. */
private[Tensors] def make4DArray[A: ClassTag](flat: Array[A], shape: Array[Int]): Array[Array[Array[Array[A]]]] = {
  val (d1, d2, d3) = (shape(1), shape(2), shape(3))
  Array.tabulate(shape(0), d1, d2, d3) { (i0, i1, i2, i3) =>
    // Horner-style row-major offset, equivalent to i3 + i2*d3 + i1*d2*d3 + i0*d1*d2*d3.
    flat(((i0 * d1 + i1) * d2 + i2) * d3 + i3)
  }
}

/** Reshapes a row-major `flat` array into a 5-dimensional nested array of the given `shape`. */
private[Tensors] def make5DArray[A: ClassTag](flat: Array[A], shape: Array[Int]): Array[Array[Array[Array[Array[A]]]]] = {
  val (d1, d2, d3, d4) = (shape(1), shape(2), shape(3), shape(4))
  Array.tabulate(shape(0), d1, d2, d3, d4) { (i0, i1, i2, i3, i4) =>
    // Horner-style row-major offset over the five dimensions.
    flat((((i0 * d1 + i1) * d2 + i2) * d3 + i3) * d4 + i4)
  }
}

/** Reshapes a row-major `flat` sequence into a `shape(0)` x `shape(1)` nested [[scala.Seq]].
  *
  * The `ClassTag` bound is unused by `Seq.tabulate` but retained for signature compatibility.
  */
private[Tensors] def make2DSeq[A: ClassTag](flat: Seq[A], shape: Seq[Int]): Seq[Seq[A]] = {
  // Positional access on a non-indexed Seq (e.g. List) is O(n) per element,
  // making the original tabulate O(n^2); convert to an IndexedSeq once up front.
  val indexed = flat.toIndexedSeq
  val columns = shape(1)
  Seq.tabulate(shape(0), columns)((i, j) => indexed(i * columns + j))
}

/** Reshapes a row-major `flat` sequence into a 3-dimensional nested [[scala.Seq]].
  *
  * The `ClassTag` bound is unused by `Seq.tabulate` but retained for signature compatibility.
  */
private[Tensors] def make3DSeq[A: ClassTag](flat: Seq[A], shape: Seq[Int]): Seq[Seq[Seq[A]]] = {
  // Avoid O(n) positional access on List inputs (the original was O(n^2) overall).
  val indexed = flat.toIndexedSeq
  val (d1, d2) = (shape(1), shape(2))
  Seq.tabulate(shape(0), d1, d2)((i0, i1, i2) => indexed((i0 * d1 + i1) * d2 + i2))
}

/** Reshapes a row-major `flat` sequence into a 4-dimensional nested [[scala.Seq]].
  *
  * The `ClassTag` bound is unused by `Seq.tabulate` but retained for signature compatibility.
  */
private[Tensors] def make4DSeq[A: ClassTag](flat: Seq[A], shape: Seq[Int]): Seq[Seq[Seq[Seq[A]]]] = {
  // Avoid O(n) positional access on List inputs (the original was O(n^2) overall).
  val indexed = flat.toIndexedSeq
  val (d1, d2, d3) = (shape(1), shape(2), shape(3))
  Seq.tabulate(shape(0), d1, d2, d3) { (i0, i1, i2, i3) =>
    indexed(((i0 * d1 + i1) * d2 + i2) * d3 + i3)
  }
}

/** Reshapes a row-major `flat` sequence into a 5-dimensional nested [[scala.Seq]].
  *
  * The `ClassTag` bound is unused by `Seq.tabulate` but retained for signature compatibility.
  */
private[Tensors] def make5DSeq[A: ClassTag](flat: Seq[A], shape: Seq[Int]): Seq[Seq[Seq[Seq[Seq[A]]]]] = {
  // Avoid O(n) positional access on List inputs (the original was O(n^2) overall).
  val indexed = flat.toIndexedSeq
  val (d1, d2, d3, d4) = (shape(1), shape(2), shape(3), shape(4))
  Seq.tabulate(shape(0), d1, d2, d3, d4) { (i0, i1, i2, i3, i4) =>
    indexed((((i0 * d1 + i1) * d2 + i2) * d3 + i3) * d4 + i4)
  }
}

/** Returns an asynchronous task that reads this [[Tensor]] as a single
  * [[scala.Float]] value, taken from the first element of the flattened
  * buffer (intended for zero-dimensional tensors).
  *
  * @group slow
  */
def readScalar: Future[Float] = {
  flatArray.map { elements =>
    elements(0)
  }
}

/** Returns an asynchronous task that reads this [[Tensor]] into a flat
  * [[scala.Array]], linearized in row-major order.
  *
  * @group slow
  */
def read1DArray: Future[Array[Float]] = {
  // `flatArray` already yields the row-major array; the previous
  // identity `map(z => z)` was a needless extra transformation step.
  flatArray
}

/** Returns an asynchronous task that reads this [[Tensor]] into a 2D
  * [[scala.Array]], reshaped from the row-major flat buffer.
  *
  * @group slow
  */
def read2DArray: Future[Array[Array[Float]]] = {
  flatArray.map { flatData =>
    make2DArray(flatData, shape)
  }
}

/** Returns an asynchronous task that reads this [[Tensor]] into a 3D
  * [[scala.Array]], reshaped from the row-major flat buffer.
  *
  * @group slow
  */
def read3DArray: Future[Array[Array[Array[Float]]]] = {
  flatArray.map { flatData =>
    make3DArray(flatData, shape)
  }
}

/** Returns an asynchronous task that reads this [[Tensor]] into a 4D
  * [[scala.Array]], reshaped from the row-major flat buffer.
  *
  * @group slow
  */
def read4DArray: Future[Array[Array[Array[Array[Float]]]]] = {
  flatArray.map { flatData =>
    make4DArray(flatData, shape)
  }
}

/** Returns an asynchronous task that reads this [[Tensor]] into a 5D
  * [[scala.Array]], reshaped from the row-major flat buffer.
  *
  * @group slow
  */
def read5DArray: Future[Array[Array[Array[Array[Array[Float]]]]]] = {
  flatArray.map { flatData =>
    make5DArray(flatData, shape)
  }
}

/** Returns an asynchronous task that reads this [[Tensor]] into a flat
  * [[scala.Seq]], linearized in row-major order.
  *
  * @group slow
  */
def read1DSeq: Future[Seq[Float]] = {
  // Make the Array-to-Seq conversion explicit instead of relying on the
  // implicit wrapping applied to the identity lambda's result.
  flatArray.map(_.toSeq)
}

/** Returns an asynchronous task that reads this [[Tensor]] into a 2D
  * [[scala.Seq]], reshaped from the row-major flat buffer.
  *
  * @group slow
  */
def read2DSeq: Future[Seq[Seq[Float]]] = {
  flatArray.map { flatData =>
    make2DSeq(flatData, shape)
  }
}

/** Returns an asynchronous task that reads this [[Tensor]] into a 3D
  * [[scala.Seq]], reshaped from the row-major flat buffer.
  *
  * @group slow
  */
def read3DSeq: Future[Seq[Seq[Seq[Float]]]] = {
  flatArray.map { flatData =>
    make3DSeq(flatData, shape)
  }
}

/** Returns an asynchronous task that reads this [[Tensor]] into a 4D
  * [[scala.Seq]], reshaped from the row-major flat buffer.
  *
  * @group slow
  */
def read4DSeq: Future[Seq[Seq[Seq[Seq[Float]]]]] = {
  flatArray.map { flatData =>
    make4DSeq(flatData, shape)
  }
}

/** Returns an asynchronous task that reads this [[Tensor]] into a 5D
  * [[scala.Seq]], reshaped from the row-major flat buffer.
  *
  * @group slow
  */
def read5DSeq: Future[Seq[Seq[Seq[Seq[Seq[Float]]]]]] = {
  flatArray.map { flatData =>
    make5DSeq(flatData, shape)
  }
}

/**
* @group metadata
*/
Expand Down
135 changes: 135 additions & 0 deletions Tensors/src/test/scala/com/thoughtworks/compute/TensorsSpec.scala
Original file line number Diff line number Diff line change
Expand Up @@ -263,7 +263,142 @@ class TensorsSpec extends AsyncFreeSpec with Matchers {
}
.run
.toScalaFuture

"readScalar" in doTensors
.flatMap { tensors =>
Do.garbageCollected(tensors.Tensor(42.0f).readScalar).map {a=>
a should be(42.0f)
}
}
.run
.toScalaFuture

"read1DArray" in doTensors
.flatMap { tensors =>
Do.garbageCollected(tensors.Tensor(Array[Float](1,2)).read1DArray).map {a=>
a should be(Array[Float](1,2))
}
}
.run
.toScalaFuture

"read2DArray" in doTensors
.flatMap { tensors =>
import tensors._
val array = Array(Array[Float](1, 2), Array[Float](3, 4), Array[Float](5,6))
Do.garbageCollected(Tensor(array).read2DArray).map {a=>
a(0) should be(Array[Float](1, 2))
a(1) should be(Array[Float](3, 4))
a(2) should be(Array[Float](5, 6))
}
}
.run
.toScalaFuture

"read3DArray" in doTensors
.flatMap { tensors =>
import tensors._
val array = Array(Array(Array[Float](1, 2), Array[Float](3, 4), Array[Float](5,6)), Array(Array[Float](7, 8), Array[Float](9, 10), Array[Float](11,12)))
Do.garbageCollected(Tensor(array).read3DArray).map { a =>
a(0)(0) should be(Array[Float](1,2))
a(0)(1) should be(Array[Float](3,4))
a(0)(2) should be(Array[Float](5,6))
a(1)(0) should be(Array[Float](7,8))
a(1)(1) should be(Array[Float](9,10))
a(1)(2) should be(Array[Float](11,12))
}
}
.run
.toScalaFuture

"read4DArray" in doTensors
.flatMap { tensors =>
import tensors._
val array = Array(Array(Array(Array[Float](1, 2), Array[Float](3, 4), Array[Float](5,6)),
Array(Array[Float](7, 8), Array[Float](9, 10), Array[Float](11,12))),
Array(Array(Array[Float](13, 14), Array[Float](15, 16), Array[Float](17,18)),
Array(Array[Float](19, 20), Array[Float](21, 22), Array[Float](23,24))))
Do.garbageCollected(Tensor(array).read4DArray).map { a =>
a(0)(0)(0) should be(Array[Float](1,2))
a(0)(0)(1) should be(Array[Float](3,4))
a(0)(0)(2) should be(Array[Float](5,6))
a(0)(1)(0) should be(Array[Float](7,8))
a(0)(1)(1) should be(Array[Float](9,10))
a(0)(1)(2) should be(Array[Float](11,12))
a(1)(0)(0) should be(Array[Float](13,14))
a(1)(0)(1) should be(Array[Float](15,16))
a(1)(0)(2) should be(Array[Float](17,18))
a(1)(1)(0) should be(Array[Float](19,20))
a(1)(1)(1) should be(Array[Float](21,22))
a(1)(1)(2) should be(Array[Float](23,24))
}
}
.run
.toScalaFuture

"read1DSeq" in doTensors
.flatMap { tensors =>
Do.garbageCollected(tensors.Tensor(Seq[Float](1,2)).read1DSeq).map {a=>
a should be(Seq[Float](1,2))
}
}
.run
.toScalaFuture

"read2DSeq" in doTensors
.flatMap { tensors =>
import tensors._
val seq = Seq(Seq[Float](1, 2), Seq[Float](3, 4), Seq[Float](5,6))
Do.garbageCollected(Tensor(seq).read2DSeq).map {a=>
a(0) should be(Seq[Float](1, 2))
a(1) should be(Seq[Float](3, 4))
a(2) should be(Seq[Float](5, 6))
}
}
.run
.toScalaFuture

"read3DSeq" in doTensors
.flatMap { tensors =>
import tensors._
val seq = Seq(Seq(Seq[Float](1, 2), Seq[Float](3, 4), Seq[Float](5,6)), Seq(Seq[Float](7, 8), Seq[Float](9, 10), Seq[Float](11,12)))
Do.garbageCollected(Tensor(seq).read3DSeq).map { a =>
a(0)(0) should be(Seq[Float](1,2))
a(0)(1) should be(Seq[Float](3,4))
a(0)(2) should be(Seq[Float](5,6))
a(1)(0) should be(Seq[Float](7,8))
a(1)(1) should be(Seq[Float](9,10))
a(1)(2) should be(Seq[Float](11,12))
}
}
.run
.toScalaFuture

"read4DSeq" in doTensors
.flatMap { tensors =>
import tensors._
val seq = Seq(Seq(Seq(Seq[Float](1, 2), Seq[Float](3, 4), Seq[Float](5,6)),
Seq(Seq[Float](7, 8), Seq[Float](9, 10), Seq[Float](11,12))),
Seq(Seq(Seq[Float](13, 14), Seq[Float](15, 16), Seq[Float](17,18)),
Seq(Seq[Float](19, 20), Seq[Float](21, 22), Seq[Float](23,24))))
Do.garbageCollected(Tensor(seq).read4DSeq).map { a =>
a(0)(0)(0) should be(Seq[Float](1,2))
a(0)(0)(1) should be(Seq[Float](3,4))
a(0)(0)(2) should be(Seq[Float](5,6))
a(0)(1)(0) should be(Seq[Float](7,8))
a(0)(1)(1) should be(Seq[Float](9,10))
a(0)(1)(2) should be(Seq[Float](11,12))
a(1)(0)(0) should be(Seq[Float](13,14))
a(1)(0)(1) should be(Seq[Float](15,16))
a(1)(0)(2) should be(Seq[Float](17,18))
a(1)(1)(0) should be(Seq[Float](19,20))
a(1)(1)(1) should be(Seq[Float](21,22))
a(1)(1)(2) should be(Seq[Float](23,24))
}
}
.run
.toScalaFuture

"random" in doTensors
.map { tensors =>
import tensors._
Expand Down

0 comments on commit d38b12e

Please sign in to comment.