diff --git a/build.sbt b/build.sbt index 7dae9c9..4ac0f03 100644 --- a/build.sbt +++ b/build.sbt @@ -6,7 +6,7 @@ lazy val scala211Version = "2.11.12" lazy val scala212Version = "2.12.11" lazy val scala213Version = "2.13.2" -lazy val dottyVersion = "0.26.0-RC1" +lazy val dottyVersion = "3.0.0-M3" organization in ThisBuild := "me.shadaj" scalaVersion in ThisBuild := scala212Version @@ -43,7 +43,7 @@ lazy val dottyTensorFlow = project .settings( name := "dotty-tensorflow", scalaVersion := dottyVersion, - libraryDependencies += "com.github.VirtuslabRnD.scalapy" % "scalapy-core_2.13" % "b3d8ddc81753a72d11e46601f7a9ad719e452e5c", + libraryDependencies += "me.shadaj" % "scalapy-core_2.13" % "0.4.0+11-aea09719", fork := true, javaOptions += s"-Djna.library.path=${"python3-config --prefix".!!.trim}/lib", projectDependencies ~=(_.map(_.withDottyCompat(dottyVersion))), @@ -68,8 +68,8 @@ lazy val scalaPyTensorFlowCross = crossProject(JVMPlatform, NativePlatform) .settings( name := "scalapy-tensorflow-cross", // scalapy-core version will replace the one in scalapy-numpy (maintaining binary compatibility) - libraryDependencies += "com.github.VirtuslabRnD.scalapy" %%% "scalapy-core" % "b3d8ddc81753a72d11e46601f7a9ad719e452e5c", - libraryDependencies += "com.github.VirtuslabRnD.scalapy-numpy" %%% "scalapy-numpy" % "8a85b7068e9a377ba4b97e2cf9b7bbe008066202" exclude("me.shadaj", "scalapy-core"), + libraryDependencies += "me.shadaj" %%% "scalapy-core" % "0.4.0+11-aea09719", + libraryDependencies += "me.shadaj" %%% "scalapy-numpy" % "0.1.0+13-442717a6+20210113-0045" excludeAll("com.github.shadaj.scalapy", "scalapy-core_2.13"), projectDependencies ~=(_.map(_.withDottyCompat(dottyVersion))), ).jvmSettings( scalaVersion := scala213Version, @@ -122,7 +122,7 @@ lazy val scalaPyTensorFlowExamplesCross = crossProject(JVMPlatform, NativePlatfo .dependsOn(scalaPyTensorFlowCross) lazy val scalaPyTensorFlowExamplesJVM = scalaPyTensorFlowExamplesCross.jvm.settings(name := 
"tensorflow-example-jvm") -lazy val scalaPyTensorFlowExamplesNative = scalaPyTensorFlowExamplesCross.native.settings(name := "tensorflow-example-native") +lazy val scalaPyTensorFlowExamplesNative = scalaPyTensorFlowExamplesCross.native.settings(name := "tensorflow-example-native") // To make sure that changes to project structure are picked up by sbt without an explicit `reload` diff --git a/dotty-tensorflow-examples/src/main/scala/me/shadaj/scalapy/tensorflow/example/BidirectionalLSTMExample.scala b/dotty-tensorflow-examples/src/main/scala/me/shadaj/scalapy/tensorflow/example/BidirectionalLSTMExample.scala index ac0c83f..3bf879c 100644 --- a/dotty-tensorflow-examples/src/main/scala/me/shadaj/scalapy/tensorflow/example/BidirectionalLSTMExample.scala +++ b/dotty-tensorflow-examples/src/main/scala/me/shadaj/scalapy/tensorflow/example/BidirectionalLSTMExample.scala @@ -1,5 +1,6 @@ package me.shadaj.scalapy.tensorflow.example +import me.shadaj.scalapy.numpy.{NDArray, PythonSeq} import me.shadaj.scalapy.py import me.shadaj.scalapy.tensorflow.api.TensorFlow import me.shadaj.scalapy.tensorflow.nd2Tensor @@ -8,6 +9,24 @@ import me.shadaj.scalapy.tensorflow.api.keras.optimizers.OptimizerEnum import me.shadaj.scalapy.tensorflow.scala.utils.Modules.{numpy => np} import me.shadaj.scalapy.tensorflow.api.keras.metrics.Metric import me.shadaj.scalapy.tensorflow.api.keras.activations.Activation +import me.shadaj.scalapy.py.SeqConverters +import me.shadaj.scalapy.tensorflow.ndd2Tensor +import me.shadaj.scalapy.py +import me.shadaj.scalapy.py.Dynamic.global +import me.shadaj.scalapy.readwrite._ +import me.shadaj.scalapy.readwrite.Reader._ +import me.shadaj.scalapy.py +import me.shadaj.scalapy.tensorflow.TensorFlow +import me.shadaj.scalapy.tensorflow.scala.utils.Modules +import me.shadaj.scalapy.tensorflow.nd2Tensor +import me.shadaj.scalapy.py.SeqConverters +import me.shadaj.scalapy.tensorflow.scala.utils.Modules._ +import me.shadaj.scalapy.numpy.{NDArray, PythonSeq} + +import 
Int.int2long +import scala.language.implicitConversions + + import Int.int2long import scala.language.implicitConversions @@ -33,10 +52,10 @@ object BidirectionalLSTMExample extends Runnable { println(s"${xTest.length} test sequences") println("Pad sequences (samples x time)") - val xTrain1 = sequence.padSequences(xTrain, maxLen = Some(maxLen)).astype(np.float32) - val xTest1 = sequence.padSequences(xTest, maxLen = Some(maxLen)).astype(np.float32) - val yTrain1 = yTrain.astype(np.float32) - val yTest1 = yTest.astype(np.float32) + val xTrain1 = sequence.padSequences(xTrain, maxLen = Some(maxLen)).astype(np.float32).as[NDArray[Float]] + val xTest1 = sequence.padSequences(xTest, maxLen = Some(maxLen)).astype(np.float32).as[NDArray[Float]] + val yTrain1 = yTrain.astype(np.float32).as[NDArray[Float]] + val yTest1 = yTest.astype(np.float32).as[NDArray[Float]] println(s"xTrain shape: ${xTrain1.shape}") println(s"xTest shape: ${xTest1.shape}") @@ -48,8 +67,8 @@ object BidirectionalLSTMExample extends Runnable { layers.Dense(1, activation = Some(Activation.Sigmoid)) )) - model.compile(OptimizerEnum.Adam, Some(keras1.backend.binaryCrossentropy), metrics = Seq(Metric.Accuracy)) - + model.compile(OptimizerEnum.Adam, Some(keras1.backend.binaryCrossentropy), metrics = Seq[String]("accuracy")) + println("Train...") val epochs = Option(System.getenv("EPOCH_COUNT")).map(_.toInt).getOrElse(2) model.fit(xTrain1, yTrain1, batchSize = Some(batchSize), epochs = epochs, validationData = Some((xTest1, yTest1))) diff --git a/dotty-tensorflow-examples/src/main/scala/me/shadaj/scalapy/tensorflow/example/GradientDescentOptimizerExample.scala b/dotty-tensorflow-examples/src/main/scala/me/shadaj/scalapy/tensorflow/example/GradientDescentOptimizerExample.scala index 592b2c1..266ce47 100644 --- a/dotty-tensorflow-examples/src/main/scala/me/shadaj/scalapy/tensorflow/example/GradientDescentOptimizerExample.scala +++ 
b/dotty-tensorflow-examples/src/main/scala/me/shadaj/scalapy/tensorflow/example/GradientDescentOptimizerExample.scala @@ -1,12 +1,17 @@ package me.shadaj.scalapy.tensorflow.example +import me.shadaj.scalapy.numpy.{NDArray, PythonSeq} import me.shadaj.scalapy.py -import me.shadaj.scalapy.tensorflow.api.Tensor +import me.shadaj.scalapy.py.SeqConverters +import me.shadaj.scalapy.numpy.PythonSeq.seqToPythonSeq +import me.shadaj.scalapy.tensorflow.{Tensor => PyTensor} +import me.shadaj.scalapy.tensorflow.{Variable => PyVariable} import me.shadaj.scalapy.tensorflow.api.scalaUtils.CloseableResourceManager -import me.shadaj.scalapy.tensorflow.api.Tensor.{TensorToPyTensor} +import me.shadaj.scalapy.tensorflow.api.Tensor.TensorToPyTensor import me.shadaj.scalapy.tensorflow.{nd2Tensor => nd2TensorPy} -import me.shadaj.scalapy.tensorflow.api.{TensorFlow => tf} +import me.shadaj.scalapy.tensorflow.api.{Variable, Tensor, TensorFlow => tf} import me.shadaj.scalapy.tensorflow.scala.utils.Modules.{numpy => np} + import scala.language.implicitConversions /** @@ -21,7 +26,7 @@ object GradientDescentOptimizerExample extends Runnable { def run(): Unit = { // Starting data - val xData = np.random.rand(100).astype(np.float32) + val xData = np.random.rand(100).astype(np.float32).as[NDArray[Float]] val yData = (xData * 0.1f) + 0.3f // Variables @@ -35,11 +40,11 @@ object GradientDescentOptimizerExample extends Runnable { def loss = () => tf.reduceMean(tf.square(y() - yData)) // Function to calculate gradients - def grad(): Option[(Tensor, Seq[Tensor])] = + def grad(): Option[(PyTensor, Seq[PyTensor])] = CloseableResourceManager.withResource(tf.GradientTape()) { tape => val lossValue = loss() val gradients: Seq[Tensor] = tape.gradient(lossValue, Seq(W, b)) - (lossValue, gradients) + (lossValue.underlying, gradients.map(_.underlying)) } // Select optimizer SGD @@ -52,7 +57,9 @@ object GradientDescentOptimizerExample extends Runnable { val num_epochs = 
Option(System.getenv("EPOCH_COUNT")).map(_.toInt).getOrElse(400) for (epoch <- 1 to num_epochs) { val (lossValue, grads) = grad().get - optimizer.applyGradients(grads.zip(Seq(W, b))) + val aa = grads.zip(Seq(W.underlying, b.underlying)) + optimizer.applyGradients(aa) + //optimizer.applyGradients(grads.zip(Seq(W, b))) if (epoch % 50 == 0) println(s"Epoch ${epoch}: Loss: ${lossValue.numpy()}") } diff --git a/dotty-tensorflow-examples/src/main/scala/me/shadaj/scalapy/tensorflow/example/LambdaExample.scala b/dotty-tensorflow-examples/src/main/scala/me/shadaj/scalapy/tensorflow/example/LambdaExample.scala new file mode 100644 index 0000000..b2ade51 --- /dev/null +++ b/dotty-tensorflow-examples/src/main/scala/me/shadaj/scalapy/tensorflow/example/LambdaExample.scala @@ -0,0 +1,49 @@ +package me.shadaj.scalapy.tensorflow.example + +import me.shadaj.scalapy.py +import me.shadaj.scalapy.py.{Any, Dynamic, PyQuote, SeqConverters, local} +import me.shadaj.scalapy.readwrite.Reader._ + + +object LambdaExample extends Runnable { + def run(): Unit = { + println("Hello, World") + val listLengthPython = py.Dynamic.global.len(List(1, 2, 3).toPythonProxy) + + local { + var count = 0 + val testLambda = Any.from(() => { + count += 1 + s"count: $count" + }) + + val testLambda2 = Any.from((x: Seq[Int]) => x.sum) + + assert(py"$testLambda()".as[String] == "count: 1") + assert(py"$testLambda()".as[String] == "count: 2") + assert(py"$testLambda2([1, 2, 3])".as[Int] == 6) + } + + val lambdaToScala = Dynamic.global.len.as[Any => Int] + assert(lambdaToScala(Seq[Any]().toPythonProxy) == 0) + assert(lambdaToScala(Seq(1, 2, 3).toPythonProxy) == 3) + + /*@py.native trait PyString extends py.Object { + def count(subsequence: String): Int = py.native + }*/ + + /*{ + @py.native trait PythonRandomModule extends py.Object { + def Random(a: String, s: Seq[Int]): py.Dynamic = py.native + } + + val random = py.module("random").as[PythonRandomModule] + //println(random.Random("123", 4)) + + val string1 = 
py.module("string").digits.as[PyString] + val string2 = py.module("string").digits.as[PyString] + // string: PyString = 0123456789 + println(string1.count("123")) + }*/ + } +} diff --git a/dotty-tensorflow-examples/src/main/scala/me/shadaj/scalapy/tensorflow/example/Main.scala b/dotty-tensorflow-examples/src/main/scala/me/shadaj/scalapy/tensorflow/example/Main.scala index a98bb6e..b7abaa8 100644 --- a/dotty-tensorflow-examples/src/main/scala/me/shadaj/scalapy/tensorflow/example/Main.scala +++ b/dotty-tensorflow-examples/src/main/scala/me/shadaj/scalapy/tensorflow/example/Main.scala @@ -6,6 +6,7 @@ object Main { case Array("BidirectionalLSTMExample") => BidirectionalLSTMExample case Array("GradientDescentOptimizerExample") => GradientDescentOptimizerExample case Array("MnistExample") => MnistExample + case Array("LambdaExample") => LambdaExample case _ => throw new IllegalArgumentException("usage: sbt 'run '") }).run() } diff --git a/dotty-tensorflow-examples/src/main/scala/me/shadaj/scalapy/tensorflow/example/MnistExample.scala b/dotty-tensorflow-examples/src/main/scala/me/shadaj/scalapy/tensorflow/example/MnistExample.scala index ddd1c06..d708500 100644 --- a/dotty-tensorflow-examples/src/main/scala/me/shadaj/scalapy/tensorflow/example/MnistExample.scala +++ b/dotty-tensorflow-examples/src/main/scala/me/shadaj/scalapy/tensorflow/example/MnistExample.scala @@ -1,6 +1,6 @@ package me.shadaj.scalapy.tensorflow.example -import me.shadaj.scalapy.numpy.NumPy +import me.shadaj.scalapy.numpy.{NDArray, NumPy, PythonSeq} import me.shadaj.scalapy.py import me.shadaj.scalapy.tensorflow.scala.utils.Modules._ import me.shadaj.scalapy.tensorflow.api.{TensorFlow => tf} @@ -8,6 +8,8 @@ import me.shadaj.scalapy.tensorflow.api.keras.datasets.Mnist import me.shadaj.scalapy.tensorflow.api.keras.models._ import me.shadaj.scalapy.tensorflow.api.keras.metrics.Metric import me.shadaj.scalapy.tensorflow.api.keras.activations.Activation +import me.shadaj.scalapy.py.SeqConverters + import 
Int.int2long import scala.language.implicitConversions @@ -28,8 +30,8 @@ object MnistExample extends Runnable { val mnist: Mnist = kerasA.datasets.mnist val ((xTrainOrig, yTrainOrig), (xTest, yTest)) = mnist.loadData() val trainingSetSize = Option(System.getenv("TRAINING_SET_SIZE")).map(_.toInt) - val xTrain = trainingSetSize.map(tss => xTrainOrig.slice(0, tss)).getOrElse(xTrainOrig) - val yTrain = trainingSetSize.map(tss => yTrainOrig.slice(0, tss)).getOrElse(yTrainOrig) + val xTrain = trainingSetSize.map(tss => xTrainOrig.slice(0, tss)).getOrElse(xTrainOrig).asInstanceOf[NDArray[Long]] + val yTrain = trainingSetSize.map(tss => yTrainOrig.slice(0, tss)).getOrElse(yTrainOrig).asInstanceOf[NDArray[Long]] val (train, test, inputShape) = if (K.imageDataFormat() == "channels_first") { @@ -46,8 +48,8 @@ object MnistExample extends Runnable { (train, test, inputShape) } - val trainImages = train.astype(np.float32) / 255.0f - val testImages = test.astype(np.float32) / 255.0f + val trainImages = train.astype(np.float32).as[NDArray[Float]] / 255.0f + val testImages = test.astype(np.float32).as[NDArray[Float]] / 255.0f println(s"xTrain shape: ${trainImages.shape}") println(s"${trainImages.shape(0)} train samples") @@ -70,12 +72,13 @@ object MnistExample extends Runnable { model.compile( loss = Some(kerasA.losses.categoricalCrossentropy), optimizer = kerasA.optimizers.Adadelta(), - metrics = Seq(Metric.Accuracy) + metrics = Seq[String](Metric.Accuracy) ) - model.fit(x = trainImages, y = trainLabels, batchSize = Some(batchSize), epochs = epochs, verbose = 1, validationData = Some((testImages, testLabels))) + model.fit(x = trainImages, y = trainLabels.as[NDArray[Float]], batchSize = Some(batchSize), epochs = epochs, verbose = 1, + validationData = Some((testImages.as[NDArray[Float]], testLabels.as[NDArray[Float]]))) - val score = model.evaluate(x = testImages, y = testLabels, verbose = 0) + val score = model.evaluate(x = testImages, y = testLabels.as[NDArray[Float]], verbose = 
0) println(s"Test loss: ${score(0)}") println(s"Test accuracy: ${score(1)}") diff --git a/dotty-tensorflow/project/build.properties b/dotty-tensorflow/project/build.properties index 0837f7a..d91c272 100644 --- a/dotty-tensorflow/project/build.properties +++ b/dotty-tensorflow/project/build.properties @@ -1 +1 @@ -sbt.version=1.3.13 +sbt.version=1.4.6 diff --git a/dotty-tensorflow/project/plugins.sbt b/dotty-tensorflow/project/plugins.sbt new file mode 100644 index 0000000..9033973 --- /dev/null +++ b/dotty-tensorflow/project/plugins.sbt @@ -0,0 +1,2 @@ + +addSbtPlugin("ch.epfl.lamp" % "sbt-dotty" % "0.5.1") diff --git a/dotty-tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/api/TensorFlow.scala b/dotty-tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/api/TensorFlow.scala index e9d8a6b..0a53057 100644 --- a/dotty-tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/api/TensorFlow.scala +++ b/dotty-tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/api/TensorFlow.scala @@ -1,19 +1,26 @@ package me.shadaj.scalapy.tensorflow.api import me.shadaj.scalapy.tensorflow.scala.utils.Modules.{tensorflow => pyTensorflow} -import me.shadaj.scalapy.tensorflow.{TensorFlow => PyTensorFlow, Tensor => PyTensor} +import me.shadaj.scalapy.tensorflow.{TensorFlow => PyTensorFlow} +import me.shadaj.scalapy.numpy.PythonSeq.seqToPythonSeq +import me.shadaj.scalapy.tensorflow.seq2Tensor import random.Random import keras.Keras import compat.Compat import nn.NN import train.Train + import scala.language.implicitConversions import scalaUtils.PythonOption._ import scalaUtils.PythonUnion._ -import me.shadaj.scalapy.tensorflow.seq2Tensor - import me.shadaj.scalapy.py.PyFunction import me.shadaj.scalapy.py +import me.shadaj.scalapy.py.|.fromLeft +import me.shadaj.scalapy.py.|.fromRight +import me.shadaj.scalapy.tensorflow.api.scalaUtils.PythonUnion.fromSingleAndTupleUnion +import me.shadaj.scalapy.tensorflow.api.Tensor.TensorToPyTensor +import 
me.shadaj.scalapy.tensorflow.api.Tensor.PyTensorToTensor + object TensorFlow { private val tf: PyTensorFlow = pyTensorflow diff --git a/dotty-tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/api/keras/layers/Conv2D.scala b/dotty-tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/api/keras/layers/Conv2D.scala index a0e3897..46f4b8f 100644 --- a/dotty-tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/api/keras/layers/Conv2D.scala +++ b/dotty-tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/api/keras/layers/Conv2D.scala @@ -3,7 +3,10 @@ package me.shadaj.scalapy.tensorflow.api.keras.layers import me.shadaj.scalapy.tensorflow.keras.layers.{Conv2D => PyConv2D} import me.shadaj.scalapy.tensorflow.api.scalaUtils.PythonUnion._ import me.shadaj.scalapy.tensorflow.api.scalaUtils.PythonOption._ + import scala.language.implicitConversions +import me.shadaj.scalapy.tensorflow.api.scalaUtils +import me.shadaj.scalapy.py.| class Conv2D private[api] (override val underlying: PyConv2D) extends Layer(underlying) { def filters: Int = underlying.filters diff --git a/dotty-tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/api/keras/models/Sequential.scala b/dotty-tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/api/keras/models/Sequential.scala index 449f1b7..06edc89 100644 --- a/dotty-tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/api/keras/models/Sequential.scala +++ b/dotty-tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/api/keras/models/Sequential.scala @@ -5,12 +5,22 @@ import me.shadaj.scalapy.tensorflow.api.scalaUtils.PythonOption._ import me.shadaj.scalapy.tensorflow.api.scalaUtils.PythonUnion._ import me.shadaj.scalapy.tensorflow.scala.utils.Modules.{numpy => np} import me.shadaj.scalapy.tensorflow.api.scalaUtils.PythonEnum._ -import me.shadaj.scalapy.tensorflow.api.scalaUtils.PythonEnum import me.shadaj.scalapy.tensorflow.api.keras.layers._ import me.shadaj.scalapy.tensorflow.api.keras.optimizers._ import 
me.shadaj.scalapy.numpy.NDArray import me.shadaj.scalapy.py.PyFunction + import scala.language.implicitConversions +import me.shadaj.scalapy.numpy.PythonSeq +import me.shadaj.scalapy.py +import me.shadaj.scalapy.py.SeqConverters +import me.shadaj.scalapy.tensorflow.TensorFlow +import me.shadaj.scalapy.tensorflow.nd2Tensor +import me.shadaj.scalapy.py.SeqConverters +import me.shadaj.scalapy.tensorflow.scala.utils.Modules._ +import me.shadaj.scalapy.numpy.{NDArray, PythonSeq} +import me.shadaj.scalapy.tensorflow.api.scalaUtils.PythonOption.toScalaOption +import me.shadaj.scalapy.readwrite.Writer class Sequential private[api] (val underlying: PySequential) extends PythonType[PySequential] { @@ -19,11 +29,11 @@ class Sequential private[api] (val underlying: PySequential) extends PythonType[ def compile( optimizer: OptimizerEnum | Optimizer = OptimizerEnum.RMSprop, loss: Option[PyFunction] = None, - metrics: Seq[String] = Seq.empty, - lossWeights: Option[Seq[(Double, Double)]] = None, - sampleWeightMode: Option[String] = None, - weightedMetrics: Seq[String] = Seq.empty, - targetTensors: Option[String] = None + metrics: PythonSeq[String] = PythonSeq.emptyString, + lossWeights: py.NoneOr[PythonSeq[(Double, Double)]] = py.None, + sampleWeightMode: py.NoneOr[String] = None, + weightedMetrics: PythonSeq[String] = PythonSeq.emptyString, + targetTensors: py.NoneOr[String] = None ) = underlying.compile( optimizer, diff --git a/dotty-tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/api/keras/optimizers/Optimizer.scala b/dotty-tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/api/keras/optimizers/Optimizer.scala index 2d46349..cf66b83 100644 --- a/dotty-tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/api/keras/optimizers/Optimizer.scala +++ b/dotty-tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/api/keras/optimizers/Optimizer.scala @@ -2,9 +2,13 @@ package me.shadaj.scalapy.tensorflow.api.keras.optimizers import 
me.shadaj.scalapy.tensorflow.compat.v1.Operation import me.shadaj.scalapy.tensorflow.api.{Tensor, Variable} - +import me.shadaj.scalapy.tensorflow.{Variable => PyVariable} +import me.shadaj.scalapy.tensorflow.{Tensor => PyTensor} import me.shadaj.scalapy.tensorflow.keras.optimizers.{Optimizer => PyOptimizer} import me.shadaj.scalapy.tensorflow.api.scalaUtils.PythonType +import me.shadaj.scalapy.py.SeqConverters +import me.shadaj.scalapy.numpy.PythonSeq + import scala.language.implicitConversions import me.shadaj.scalapy.py @@ -12,8 +16,7 @@ trait Optimizer private[api] (val underlying: PyOptimizer) extends PythonType[Py // TODO loss should be a function () => py.Any def minimize(loss: py.Any, varList: Seq[Variable]): Operation = underlying.minimize(loss, varList.map(_.underlying)) - def applyGradients(gradsAndVars: Seq[(Tensor, Variable)]): Operation = - underlying.apply_gradients(gradsAndVars.map { - case (tensor, variable) => (tensor.underlying, variable.underlying) - }) + def applyGradients(gradsAndVars: PythonSeq[(PyTensor, PyVariable)]): Operation = { + underlying.apply_gradients(gradsAndVars) + } } diff --git a/dotty-tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/api/scalaUtils/PythonUnion.scala b/dotty-tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/api/scalaUtils/PythonUnion.scala index 1f56b6f..745ee4f 100644 --- a/dotty-tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/api/scalaUtils/PythonUnion.scala +++ b/dotty-tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/api/scalaUtils/PythonUnion.scala @@ -1,5 +1,6 @@ package me.shadaj.scalapy.tensorflow.api.scalaUtils +import me.shadaj.scalapy.numpy.PythonSeq import me.shadaj.scalapy.py import scala.language.implicitConversions import scala.reflect.ClassTag @@ -15,10 +16,11 @@ object PythonUnion { } implicit def fromPythonUnion[A, B](u: py.|[A, B]): A | B = { - if (u.isLeft) { - u.value.asInstanceOf[A] + val uu: py.|[A, B] = u.asInstanceOf[py.|[A, B]] + if (uu.isLeft) { + 
uu.value.asInstanceOf[A] } else { - u.value.asInstanceOf[B] + uu.value.asInstanceOf[B] } } @@ -39,7 +41,7 @@ object PythonUnion { implicit def fromPythonTypeSeqsUnion[X <: py.Any, Y <: py.Any, A <: PythonType[X], B <: Seq[PythonType[Y]]]( u: A | B - )(implicit ev1: ClassTag[A], ev2: ClassTag[B]): py.|[X, Seq[Y]] = + )(implicit ev1: ClassTag[A], ev2: ClassTag[B]): py.|[X, PythonSeq[Y]] = u match { case a: A => py.|.fromLeft(a.underlying) case b: B => py.|.fromRight(b.map(_.underlying)) diff --git a/project/build.properties b/project/build.properties index 797e7cc..d91c272 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version=1.3.10 +sbt.version=1.4.6 diff --git a/project/plugins.sbt b/project/plugins.sbt index 240571c..90ff9c4 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -1,6 +1,6 @@ addSbtPlugin("org.portable-scala" % "sbt-scalajs-crossproject" % "1.0.0") addSbtPlugin("org.portable-scala" % "sbt-scala-native-crossproject" % "1.0.0") -addSbtPlugin("org.scala-native" % "sbt-scala-native" % "0.3.9") +addSbtPlugin("org.scala-native" % "sbt-scala-native" % "0.4.0-M2") addSbtPlugin("com.dwijnand" % "sbt-dynver" % "4.0.0") @@ -14,3 +14,7 @@ addSbtPlugin("ch.epfl.lamp" % "sbt-dotty" % "0.4.0") // "2.4.0" is just sbt plugin version and NOT scalafmt version itself addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.0") + +addSbtPlugin("net.virtual-void" % "sbt-dependency-graph" % "0.10.0-RC1") + +addSbtPlugin("ch.epfl.lamp" % "sbt-dotty" % "0.5.1") \ No newline at end of file diff --git a/tensorflow-examples/src/main/scala/me/shadaj/scalapy/tensorflow/example/BidirectionalLSTMExample.scala b/tensorflow-examples/src/main/scala/me/shadaj/scalapy/tensorflow/example/BidirectionalLSTMExample.scala index 5de9376..a11bd1e 100644 --- a/tensorflow-examples/src/main/scala/me/shadaj/scalapy/tensorflow/example/BidirectionalLSTMExample.scala +++ 
b/tensorflow-examples/src/main/scala/me/shadaj/scalapy/tensorflow/example/BidirectionalLSTMExample.scala @@ -4,7 +4,10 @@ import me.shadaj.scalapy.py import me.shadaj.scalapy.tensorflow.TensorFlow import me.shadaj.scalapy.tensorflow.scala.utils.Modules import me.shadaj.scalapy.tensorflow.nd2Tensor +import me.shadaj.scalapy.py.SeqConverters import me.shadaj.scalapy.tensorflow.scala.utils.Modules._ +import me.shadaj.scalapy.numpy.{NDArray, PythonSeq} + import Int.int2long import scala.language.implicitConversions @@ -32,10 +35,10 @@ object BidirectionalLSTMExample extends Runnable { println(s"${xTest.length} test sequences") println("Pad sequences (samples x time)") - val xTrain1 = sequence.pad_sequences(xTrain, maxlen = maxlen).astype(np.float32) - val xTest1 = sequence.pad_sequences(xTest, maxlen = maxlen).astype(np.float32) - val yTrain1 = yTrain.astype(np.float32) - val yTest1 = yTest.astype(np.float32) + val xTrain1 = sequence.pad_sequences(xTrain, maxlen = maxlen).astype(np.float32).as[NDArray[Float]] + val xTest1 = sequence.pad_sequences(xTest, maxlen = maxlen).astype(np.float32).as[NDArray[Float]] + val yTrain1 = yTrain.astype(np.float32).as[NDArray[Float]] + val yTest1 = yTest.astype(np.float32).as[NDArray[Float]] println(s"xTrain shape: ${xTrain1.shape}") println(s"xTest shape: ${xTest1.shape}") @@ -45,7 +48,7 @@ object BidirectionalLSTMExample extends Runnable { model.add(layers.Dropout(0.5)) model.add(layers.Dense(1, activation = "sigmoid")) - model.compile("adam", keras1.backend.binary_crossentropy, metrics = Seq("accuracy")) + model.compile("adam", keras1.backend.binary_crossentropy, metrics = Seq("accuracy")) println("Train...") val epochs = Option(System.getenv("EPOCH_COUNT")).map(_.toInt).getOrElse(4) diff --git a/tensorflow-examples/src/main/scala/me/shadaj/scalapy/tensorflow/example/GradientDescentOptimizerExample.scala b/tensorflow-examples/src/main/scala/me/shadaj/scalapy/tensorflow/example/GradientDescentOptimizerExample.scala index 
767c551..8487201 100644 --- a/tensorflow-examples/src/main/scala/me/shadaj/scalapy/tensorflow/example/GradientDescentOptimizerExample.scala +++ b/tensorflow-examples/src/main/scala/me/shadaj/scalapy/tensorflow/example/GradientDescentOptimizerExample.scala @@ -1,9 +1,18 @@ package me.shadaj.scalapy.tensorflow.example +import me.shadaj.scalapy.numpy.PythonSeq import me.shadaj.scalapy.tensorflow.scala.utils.Modules._ import me.shadaj.scalapy.tensorflow.Tensor import me.shadaj.scalapy.tensorflow.scala.utils.ContextManager import me.shadaj.scalapy.tensorflow._ +import me.shadaj.scalapy.py.SeqConverters +import me.shadaj.scalapy.numpy.PythonSeq.seqToPythonSeq +import me.shadaj.scalapy.py +import me.shadaj.scalapy.py.Dynamic.global +import me.shadaj.scalapy.readwrite.Writer +import me.shadaj.scalapy.readwrite.Reader +import me.shadaj.scalapy.py.SeqConverters +import me.shadaj.scalapy.tensorflow.ndd2Tensor /** * This example performs linear regression on randomized input conforming to y= 0.1 * x + 0.3. 
@@ -23,8 +32,8 @@ object GradientDescentOptimizerExample extends Runnable { val yData = (xData * 0.1f) + 0.3f // Variables - val W = tf.Variable(tf.random.uniform(shape = Seq(1), minval = -1, maxval = 1)) - val b = tf.Variable(tf.zeros(Seq(1))) + val W: Variable = tf.Variable(tf.random.uniform(shape = Seq(1), minval = -1, maxval = 1)) + val b: Variable = tf.Variable(tf.zeros(Seq(1))) // Function to calculate output def y = () => W * xData + b @@ -34,9 +43,9 @@ object GradientDescentOptimizerExample extends Runnable { // Function to calculate gradients def grad(): Option[(Tensor, Seq[Tensor])] = - ContextManager.withContext(tf.GradientTape()) { tape => - val loss_value = loss() - val gradients = tape.gradient(loss_value, Seq(W, b)) + ContextManager.withContext(tf.GradientTape()) { tape: GradientTape => + val loss_value: Tensor = loss() + val gradients: Seq[Tensor] = tape.gradient(target=loss_value, sources=Seq(W, b)) (loss_value, gradients) } @@ -44,14 +53,15 @@ object GradientDescentOptimizerExample extends Runnable { val optimizer = tf.keras.optimizers.SGD(learning_rate = 0.1, momentum = 0.9) // Initial Learning step - val (loss_value, grads) = grad().get + val (loss_value, _) = grad().get println(s"Step: 0, Initial Loss: ${loss_value.numpy()}") // Learning steps - val num_epochs = Option(System.getenv("EPOCH_COUNT")).map(_.toInt).getOrElse(400) + val num_epochs: Int = Option(System.getenv("EPOCH_COUNT")).map(_.toInt).getOrElse(400) for (epoch <- 1 to num_epochs) { - val (loss_value, grads) = grad().get - optimizer.apply_gradients(grads.zip(Seq(W, b))) + val (loss_value, grads: Seq[Tensor]) = grad().get + val aa: Seq[(Tensor, Variable)] = grads.zip(Seq(W, b)) + optimizer.apply_gradients(aa.toPythonProxy.as[PythonSeq[(Tensor, Variable)]]) if (epoch % 50 == 0) println(s"Epoch ${epoch}: Loss: ${loss_value.numpy()}") } diff --git a/tensorflow-examples/src/main/scala/me/shadaj/scalapy/tensorflow/example/LambdaExample.scala 
b/tensorflow-examples/src/main/scala/me/shadaj/scalapy/tensorflow/example/LambdaExample.scala new file mode 100644 index 0000000..9f81dd0 --- /dev/null +++ b/tensorflow-examples/src/main/scala/me/shadaj/scalapy/tensorflow/example/LambdaExample.scala @@ -0,0 +1,49 @@ +package me.shadaj.scalapy.tensorflow.example + +import me.shadaj.scalapy.readwrite.Reader._ +import me.shadaj.scalapy.py +import me.shadaj.scalapy.py.{Any, Dynamic, PyQuote, SeqConverters, local} + + +object LambdaExample extends Runnable { + def run(): Unit = { + println("Hello, World") + val listLengthPython = py.Dynamic.global.len(List(1, 2, 3).toPythonProxy) + + local { + var count = 0 + val testLambda = Any.from(() => { + count += 1 + s"count: $count" + }) + + val testLambda2 = Any.from((x: Seq[Int]) => x.sum) + + assert(py"$testLambda()".as[String] == "count: 1") + assert(py"$testLambda()".as[String] == "count: 2") + assert(py"$testLambda2([1, 2, 3])".as[Int] == 6) + } + + val lambdaToScala = Dynamic.global.len.as[Any => Int] + assert(lambdaToScala(Seq[Any]().toPythonProxy) == 0) + assert(lambdaToScala(Seq(1, 2, 3).toPythonProxy) == 3) + + @py.native trait PyString extends py.Object { + def count(subsequence: String): Int = py.native + } + + /*{ + @py.native trait PythonRandomModule extends py.Object { + def Random(a: String, s: Seq[Int]): py.Dynamic = py.native + } + + val random = py.module("random").as[PythonRandomModule] + //println(random.Random("123", 4)) + + val string1 = py.module("string").digits.as[PyString] + val string2 = py.module("string").digits.as[PyString] + // string: PyString = 0123456789 + println(string1.count("123")) + }*/ + } +} diff --git a/tensorflow-examples/src/main/scala/me/shadaj/scalapy/tensorflow/example/Main.scala b/tensorflow-examples/src/main/scala/me/shadaj/scalapy/tensorflow/example/Main.scala index 5057d31..ea80280 100644 --- a/tensorflow-examples/src/main/scala/me/shadaj/scalapy/tensorflow/example/Main.scala +++ 
b/tensorflow-examples/src/main/scala/me/shadaj/scalapy/tensorflow/example/Main.scala @@ -6,6 +6,7 @@ object Main { case Array("MnistExample") => MnistExample case Array("GradientDescentOptimizerExample") => GradientDescentOptimizerExample case Array("BidirectionalLSTMExample") => BidirectionalLSTMExample + case Array("LambdaExample") => LambdaExample case _ => throw new IllegalArgumentException("usage: sbt 'run '") }).run() } diff --git a/tensorflow-examples/src/main/scala/me/shadaj/scalapy/tensorflow/example/MnistExample.scala b/tensorflow-examples/src/main/scala/me/shadaj/scalapy/tensorflow/example/MnistExample.scala index df5b0cf..62b71ea 100644 --- a/tensorflow-examples/src/main/scala/me/shadaj/scalapy/tensorflow/example/MnistExample.scala +++ b/tensorflow-examples/src/main/scala/me/shadaj/scalapy/tensorflow/example/MnistExample.scala @@ -2,6 +2,11 @@ package me.shadaj.scalapy.tensorflow.example import me.shadaj.scalapy.tensorflow.scala.utils.Modules._ import me.shadaj.scalapy.tensorflow.keras.datasets.Mnist +import me.shadaj.scalapy.numpy.{NDArray, PythonSeq} +import me.shadaj.scalapy.py.Dynamic.global.{applyDynamic, applyDynamicNamed} +import me.shadaj.scalapy.py.SeqConverters +import me.shadaj.scalapy.tensorflow.compat.v1.{PythonDict, Session} +import me.shadaj.scalapy.tensorflow.keras.layers.Layer object MnistExample extends Runnable { @@ -19,10 +24,10 @@ object MnistExample extends Runnable { val img_rows, img_cols = 28 val mnist: Mnist = keras.datasets.mnist - val ((x_train_orig, y_train_orig), (x_test, y_test)) = mnist.load_data() + val ((x_train_orig: NDArray[Long], y_train_orig), (x_test, y_test)) = mnist.load_data() val trainingSetSize = Option(System.getenv("TRAINING_SET_SIZE")).map(_.toInt) - val x_train = trainingSetSize.map(tss => x_train_orig.slice(0, tss)).getOrElse(x_train_orig) - val y_train = trainingSetSize.map(tss => y_train_orig.slice(0, tss)).getOrElse(y_train_orig) + val x_train = trainingSetSize.map(tss => x_train_orig.slice(0, 
tss)).getOrElse(x_train_orig).asInstanceOf[NDArray[Long]] + val y_train = trainingSetSize.map(tss => y_train_orig.slice(0, tss)).getOrElse(y_train_orig).asInstanceOf[NDArray[Long]] val (train, test, input_shape) = if (K.image_data_format == "channels_first") { @@ -40,19 +45,19 @@ object MnistExample extends Runnable { } // TODO: not type safe - val trainImages = train.astype(np.float32) / 255.0f - val testImages = test.astype(np.float32) / 255.0f + val trainImages = train.astype(np.float32).as[NDArray[Float]] / 255.0f + val testImages = test.astype(np.float32).as[NDArray[Float]] / 255.0f println(s"x_train shape: ${trainImages.shape}") println(s"${trainImages.shape(0)} train samples") println(s"${testImages.shape(0)} test samples") - val trainLabels = keras.utils.to_categorical(y_train, num_classes).astype(np.float32) - val testLabels = keras.utils.to_categorical(y_test, num_classes).astype(np.float32) + val trainLabels = keras.utils.to_categorical(y_train, num_classes).astype(np.float32).as[NDArray[Float]] + val testLabels = keras.utils.to_categorical(y_test, num_classes).astype(np.float32).as[NDArray[Float]] val model = keras.models.Sequential() model.add( - layers.Conv2D(filters = 32, kernel_size = (3, 3), activation = "relu", kwargs = Map("input_shape" -> input_shape)) + layers.Conv2D(filters = 32, kernel_size = (3, 3), activation = "relu", kwargs = Map("input_shape" -> input_shape)).as[Layer] ) model.add(layers.Conv2D(filters = 64, kernel_size = (3, 3), activation = "relu")) model.add(layers.MaxPooling2D((2, 2))) @@ -65,12 +70,13 @@ object MnistExample extends Runnable { model.compile( loss = keras.losses.categorical_crossentropy, optimizer = keras.optimizers.Adadelta(), - metrics = Seq("accuracy") + metrics = Seq("accuracy").toPythonCopy.as[PythonSeq[String]] ) - model.fit(x = trainImages, y = trainLabels, batch_size = batch_size, epochs = epochs, verbose = 1, validation_data = (testImages, testLabels)) + model.fit(x = trainImages, y = 
trainLabels.as[NDArray[Float]], batch_size = batch_size, epochs = epochs, verbose = 1, + validation_data = (testImages.as[NDArray[Float]], testLabels.as[NDArray[Float]])) - val score = model.evaluate(x = testImages, y = testLabels, verbose = 0) + val score = model.evaluate(x = testImages, y = testLabels.as[NDArray[Float]], verbose = 0) println(s"Test loss: ${score(0)}") println(s"Test accuracy: ${score(1)}") diff --git a/tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/GradientTape.scala b/tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/GradientTape.scala index 00d852d..491e21a 100644 --- a/tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/GradientTape.scala +++ b/tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/GradientTape.scala @@ -1,13 +1,13 @@ package me.shadaj.scalapy.tensorflow +import me.shadaj.scalapy.numpy.PythonSeq import me.shadaj.scalapy.py -import me.shadaj.scalapy.py.{PyValue, Reader} import me.shadaj.scalapy.tensorflow.scala.utils.Context @py.native trait GradientTape extends py.Object with Context { // TODO: returns Tensor or None - def gradient(target: Tensor, sources: Seq[Variable]): Seq[Tensor] = py.native + def gradient(target: Tensor, sources: PythonSeq[Variable], output_gradients: py.Any = py.None): Seq[Tensor] = py.native def watch(tensor: Tensor): Unit = py.native def __enter__(): Unit = py.native def __exit__(typ: py.Any = py.None, value: py.Any = py.None, traceback: py.Any = py.None): Unit = py.native diff --git a/tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/Tensor.scala b/tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/Tensor.scala index 354ea1f..8d04f39 100644 --- a/tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/Tensor.scala +++ b/tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/Tensor.scala @@ -1,6 +1,5 @@ package me.shadaj.scalapy.tensorflow -import me.shadaj.scalapy.numpy.NDArray import me.shadaj.scalapy.py @py.native diff --git 
a/tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/TensorFlow.scala b/tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/TensorFlow.scala index 11cc93f..544b3ba 100644 --- a/tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/TensorFlow.scala +++ b/tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/TensorFlow.scala @@ -1,7 +1,8 @@ package me.shadaj.scalapy.tensorflow +import me.shadaj.scalapy.numpy.PythonSeq import me.shadaj.scalapy.py -import me.shadaj.scalapy.py.{PyFunction, Writer, |} +import me.shadaj.scalapy.py.{PyFunction, |} import me.shadaj.scalapy.tensorflow.compat.Compat import me.shadaj.scalapy.tensorflow.keras.Keras import me.shadaj.scalapy.tensorflow.nn.NN @@ -41,7 +42,7 @@ trait TensorFlow extends py.Object { def reshape(tensor: Tensor, shape: Tensor): Tensor = py.native - def add_n(ts: Seq[Tensor]): Tensor = py.native + def add_n(ts: PythonSeq[Tensor]): Tensor = py.native def square(t: Tensor): Tensor = py.native @@ -49,9 +50,9 @@ trait TensorFlow extends py.Object { def reduce_mean(t: Tensor): Tensor = py.native - def gradients(ys: Tensor | Seq[Tensor], xs: Tensor | Seq[Tensor]): Seq[Tensor] = py.native + def gradients(ys: Tensor | PythonSeq[Tensor], xs: Tensor | PythonSeq[Tensor]): Seq[Tensor] = py.native - def gradients(ys: Tensor, xs: Seq[Tensor], grad_ys: Tensor): Seq[Tensor] = py.native + def gradients(ys: Tensor, xs: PythonSeq[Tensor], grad_ys: Tensor): Seq[Tensor] = py.native def cond(c: Tensor, ifTrue: py.Object, ifFalse: py.Object): Tensor = py.native diff --git a/tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/compat/v1/Session.scala b/tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/compat/v1/Session.scala index 605c56b..2d316b3 100644 --- a/tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/compat/v1/Session.scala +++ b/tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/compat/v1/Session.scala @@ -1,15 +1,15 @@ package me.shadaj.scalapy.tensorflow.compat.v1 -import me.shadaj.scalapy.numpy._ +import 
me.shadaj.scalapy.numpy.{NDArray, PythonSeq} import me.shadaj.scalapy.py -import me.shadaj.scalapy.py.Writer +import me.shadaj.scalapy.readwrite.Writer import me.shadaj.scalapy.tensorflow.{Tensor, Variable} @py.native trait PythonDict[K, V] extends py.Object object PythonDict { implicit def mapToPythonDict[K, V](map: Map[K, V])(implicit writer: Writer[Map[K, V]]): PythonDict[K, V] = { - py.global.dict(map).as[PythonDict[K, V]] + py.Dynamic.global.dict(map).as[PythonDict[K, V]] } } @@ -25,5 +25,5 @@ trait Session extends py.Object { def run(fetches: Tensor, feedDict: PythonDict[Tensor, py.Object]): Seq[NDArray[Double]] = py.native - def run(fetches: Seq[Tensor], feedDict: PythonDict[Tensor, py.Object]): Seq[Seq[NDArray[Double]]] = py.native + def run(fetches: PythonSeq[Tensor], feedDict: PythonDict[Tensor, py.Object]): Seq[Seq[NDArray[Double]]] = py.native } diff --git a/tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/compat/v1/V1.scala b/tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/compat/v1/V1.scala index 20805ab..577b8b8 100644 --- a/tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/compat/v1/V1.scala +++ b/tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/compat/v1/V1.scala @@ -1,5 +1,6 @@ package me.shadaj.scalapy.tensorflow.compat.v1 +import me.shadaj.scalapy.numpy.PythonSeq import me.shadaj.scalapy.py import me.shadaj.scalapy.tensorflow.Tensor import me.shadaj.scalapy.tensorflow.scala.utils.PythonModule @@ -8,7 +9,7 @@ import me.shadaj.scalapy.tensorflow.scala.utils.PythonModule trait V1 extends py.Object with PythonModule { def placeholder(`type`: String): Tensor = py.native - def placeholder(`type`: String, shape: Seq[py.NoneOr[Int]]): Tensor = py.native + def placeholder(`type`: String, shape: PythonSeq[py.NoneOr[Int]]): Tensor = py.native def global_variables_initializer(): Operation = py.native diff --git a/tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/keras/Keras.scala 
b/tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/keras/Keras.scala index f93cc29..7548927 100644 --- a/tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/keras/Keras.scala +++ b/tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/keras/Keras.scala @@ -1,5 +1,6 @@ package me.shadaj.scalapy.tensorflow.keras +import me.shadaj.scalapy.numpy.PythonSeq import me.shadaj.scalapy.py import me.shadaj.scalapy.tensorflow.Tensor import me.shadaj.scalapy.tensorflow.keras.backend.Backend @@ -24,7 +25,7 @@ trait Keras extends py.Object with PythonModule { def preprocessing: Preprocessing = py.native def Input( - shape: Seq[py.NoneOr[Int]] = Seq(), + shape: PythonSeq[py.NoneOr[Int]] = Seq.empty[py.NoneOr[Int]].toPythonProxy.as[PythonSeq[py.NoneOr[Int]]], // batch_size: py.NoneOr[Int]=py.None, name: py.NoneOr[String] = py.None, dtype: py.NoneOr[String] = py.None, diff --git a/tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/keras/datasets/Mnist.scala b/tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/keras/datasets/Mnist.scala index 2c3a7e0..a99a526 100644 --- a/tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/keras/datasets/Mnist.scala +++ b/tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/keras/datasets/Mnist.scala @@ -1,7 +1,7 @@ package me.shadaj.scalapy.tensorflow.keras.datasets -import me.shadaj.scalapy.py import me.shadaj.scalapy.numpy.NDArray +import me.shadaj.scalapy.py @py.native trait Mnist extends py.Object { diff --git a/tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/keras/layers/Layers.scala b/tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/keras/layers/Layers.scala index 37b72ce..09970f2 100644 --- a/tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/keras/layers/Layers.scala +++ b/tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/keras/layers/Layers.scala @@ -4,9 +4,11 @@ import me.shadaj.scalapy.numpy.NDArray import me.shadaj.scalapy.py import me.shadaj.scalapy.tensorflow.Tensor import 
me.shadaj.scalapy.tensorflow.scala.utils.PythonModule +import me.shadaj.scalapy.tensorflow.compat.v1.{PythonDict, Session} @py.native trait Layers extends py.Object with PythonModule { + private val origDynamic = this.as[py.Dynamic] def Conv2D( filters: Int, kernel_size: py.|[Int, (Int, Int)], @@ -23,7 +25,7 @@ trait Layers extends py.Object with PythonModule { activity_regularizer: py.NoneOr[String] = py.None, kernel_constraint: py.NoneOr[String] = py.None, bias_constraint: py.NoneOr[String] = py.None, - kwargs: Map[String, py.Any] = Map() + kwargs: Map[String, py.Any] = Map.empty[String, py.Any] ): Conv2D = py.nativeNamed def Dropout(rate: Double, noise_shape: py.NoneOr[Tensor] = py.None, seed: py.NoneOr[Int] = py.None): Dropout = py.nativeNamed diff --git a/tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/keras/layers/package.scala b/tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/keras/layers/package.scala index 6acb43f..ce51ecc 100644 --- a/tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/keras/layers/package.scala +++ b/tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/keras/layers/package.scala @@ -1,8 +1,10 @@ package me.shadaj.scalapy.tensorflow.keras +import me.shadaj.scalapy.interpreter.PyValue + import scala.util.control.NonFatal -import me.shadaj.scalapy.py.{PyValue, Reader} import me.shadaj.scalapy.py +import me.shadaj.scalapy.readwrite.Reader package object layers { implicit val tupleReader: Reader[py.|[Int, (Int, Int)]] = new Reader[py.|[Int, (Int, Int)]] { diff --git a/tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/keras/models/Models.scala b/tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/keras/models/Models.scala index 37b23ca..f6ef744 100644 --- a/tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/keras/models/Models.scala +++ b/tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/keras/models/Models.scala @@ -1,10 +1,11 @@ package me.shadaj.scalapy.tensorflow.keras.models +import 
me.shadaj.scalapy.numpy.PythonSeq import me.shadaj.scalapy.py import me.shadaj.scalapy.tensorflow.scala.utils.PythonModule import me.shadaj.scalapy.tensorflow.keras.layers.Layer @py.native trait Models extends py.Object with PythonModule { - def Sequential(layers: Seq[Layer] = Seq()): Sequential = py.native + def Sequential(layers: PythonSeq[Layer] = Seq()): Sequential = py.native } diff --git a/tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/keras/models/Sequential.scala b/tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/keras/models/Sequential.scala index 29c8c68..7b5cc32 100644 --- a/tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/keras/models/Sequential.scala +++ b/tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/keras/models/Sequential.scala @@ -1,9 +1,9 @@ package me.shadaj.scalapy.tensorflow.keras package models +import me.shadaj.scalapy.numpy.{NDArray, PythonSeq} import me.shadaj.scalapy.py import me.shadaj.scalapy.tensorflow.keras.layers.Layer -import me.shadaj.scalapy.numpy.NDArray import me.shadaj.scalapy.py.PyFunction import me.shadaj.scalapy.tensorflow.keras.optimizers.Optimizer @@ -16,10 +16,10 @@ trait Sequential extends py.Object { def compile( optimizer: py.|[String, Optimizer] = "rmsprop", loss: py.NoneOr[PyFunction] = py.None, - metrics: Seq[String] = Seq.empty, - loss_weights: py.NoneOr[Seq[(Double, Double)]] = py.None, + metrics: PythonSeq[String] = PythonSeq.emptyString, + loss_weights: py.NoneOr[PythonSeq[(Double, Double)]] = py.None, sample_weight_mode: py.NoneOr[String] = py.None, - weighted_metrics: Seq[String] = Seq.empty, + weighted_metrics: PythonSeq[String] = PythonSeq.emptyString, target_tensors: py.NoneOr[String] = py.None ) = origDynamic diff --git a/tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/keras/optimizers/Adadelta.scala b/tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/keras/optimizers/Adadelta.scala index 49eb042..659d87e 100644 --- 
a/tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/keras/optimizers/Adadelta.scala +++ b/tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/keras/optimizers/Adadelta.scala @@ -1,7 +1,6 @@ package me.shadaj.scalapy.tensorflow.keras.optimizers import me.shadaj.scalapy.py -import me.shadaj.scalapy.py.{PyValue, Reader} @py.native trait Adadelta extends Optimizer diff --git a/tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/keras/optimizers/Adam.scala b/tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/keras/optimizers/Adam.scala index 93ee331..289d2f8 100644 --- a/tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/keras/optimizers/Adam.scala +++ b/tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/keras/optimizers/Adam.scala @@ -1,7 +1,6 @@ package me.shadaj.scalapy.tensorflow.keras.optimizers import me.shadaj.scalapy.py -import me.shadaj.scalapy.py.{PyValue, Reader} @py.native trait Adam extends Optimizer diff --git a/tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/keras/optimizers/Optimizer.scala b/tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/keras/optimizers/Optimizer.scala index 4651cc9..d0d7789 100644 --- a/tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/keras/optimizers/Optimizer.scala +++ b/tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/keras/optimizers/Optimizer.scala @@ -1,5 +1,6 @@ package me.shadaj.scalapy.tensorflow.keras.optimizers +import me.shadaj.scalapy.numpy.PythonSeq import me.shadaj.scalapy.py import me.shadaj.scalapy.tensorflow.compat.v1.Operation import me.shadaj.scalapy.tensorflow.{Tensor, Variable} @@ -7,7 +8,7 @@ import me.shadaj.scalapy.tensorflow.{Tensor, Variable} @py.native trait Optimizer extends py.Object { // TODO loss should be a function () => py.Any - def minimize(loss: py.Any, var_list: Seq[Variable]): Operation = py.native + def minimize(loss: py.Any, var_list: PythonSeq[Variable]): Operation = py.native - def apply_gradients(grads_and_vars: Seq[(Tensor, Variable)]): 
Operation = py.native + def apply_gradients(grads_and_vars: PythonSeq[(Tensor, Variable)]): Operation = py.native } diff --git a/tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/keras/optimizers/SGD.scala b/tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/keras/optimizers/SGD.scala index e480497..a6204e6 100644 --- a/tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/keras/optimizers/SGD.scala +++ b/tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/keras/optimizers/SGD.scala @@ -1,7 +1,6 @@ package me.shadaj.scalapy.tensorflow.keras.optimizers import me.shadaj.scalapy.py -import me.shadaj.scalapy.py.{PyValue, Reader} @py.native trait SGD extends Optimizer diff --git a/tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/keras/utils/Utils.scala b/tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/keras/utils/Utils.scala index 7242cc4..9a904a6 100644 --- a/tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/keras/utils/Utils.scala +++ b/tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/keras/utils/Utils.scala @@ -1,7 +1,7 @@ package me.shadaj.scalapy.tensorflow.keras.utils -import me.shadaj.scalapy.py import me.shadaj.scalapy.numpy.NDArray +import me.shadaj.scalapy.py import me.shadaj.scalapy.tensorflow.scala.utils.PythonModule @py.native diff --git a/tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/package.scala b/tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/package.scala index bdff239..552ea83 100644 --- a/tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/package.scala +++ b/tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/package.scala @@ -1,7 +1,9 @@ package me.shadaj.scalapy +import me.shadaj.scalapy.interpreter.PyValue import me.shadaj.scalapy.numpy.NDArray -import me.shadaj.scalapy.py.Writer +import me.shadaj.scalapy.readwrite.{Reader, Writer} +import me.shadaj.scalapy.py.SeqConverters import scala.language.implicitConversions @@ -14,7 +16,17 @@ package object tensorflow { nd.as[Tensor] } + implicit 
def ndd2Tensor(nd: NDArray[Double]): Tensor = { + nd.as[Tensor] + } + implicit def seq2Tensor(s: Seq[Int]): Tensor = { - py.Any.from(s)(Writer.seqWriter).as[Tensor] + s.toPythonProxy.as[Tensor] } + } + + + + + diff --git a/tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/random/Random.scala b/tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/random/Random.scala index d2c00f3..b194aec 100644 --- a/tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/random/Random.scala +++ b/tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/random/Random.scala @@ -3,6 +3,7 @@ package me.shadaj.scalapy.tensorflow.random import me.shadaj.scalapy.py import me.shadaj.scalapy.tensorflow.scala.utils.PythonModule import me.shadaj.scalapy.tensorflow.Tensor +import me.shadaj.scalapy.tensorflow.seq2Tensor @py.native trait Random extends py.Object with PythonModule { diff --git a/tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/train/ExponentialMovingAverage.scala b/tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/train/ExponentialMovingAverage.scala index 1b353c8..a00ccc6 100644 --- a/tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/train/ExponentialMovingAverage.scala +++ b/tensorflow/src/main/scala/me/shadaj/scalapy/tensorflow/train/ExponentialMovingAverage.scala @@ -1,7 +1,6 @@ package me.shadaj.scalapy.tensorflow.train import me.shadaj.scalapy.py -import me.shadaj.scalapy.py.{PyValue, Reader} @py.native trait ExponentialMovingAverage extends py.Object