Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Scalapy update #56

Draft
wants to merge 2 commits into
base: develop
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
10 changes: 5 additions & 5 deletions build.sbt
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ lazy val scala211Version = "2.11.12"
lazy val scala212Version = "2.12.11"
lazy val scala213Version = "2.13.2"

lazy val dottyVersion = "0.26.0-RC1"
lazy val dottyVersion = "3.0.0-M3"

organization in ThisBuild := "me.shadaj"
scalaVersion in ThisBuild := scala212Version
Expand Down Expand Up @@ -43,7 +43,7 @@ lazy val dottyTensorFlow = project
.settings(
name := "dotty-tensorflow",
scalaVersion := dottyVersion,
libraryDependencies += "com.github.VirtuslabRnD.scalapy" % "scalapy-core_2.13" % "b3d8ddc81753a72d11e46601f7a9ad719e452e5c",
libraryDependencies += "me.shadaj" % "scalapy-core_2.13" % "0.4.0+11-aea09719",
fork := true,
javaOptions += s"-Djna.library.path=${"python3-config --prefix".!!.trim}/lib",
projectDependencies ~=(_.map(_.withDottyCompat(dottyVersion))),
Expand All @@ -68,8 +68,8 @@ lazy val scalaPyTensorFlowCross = crossProject(JVMPlatform, NativePlatform)
.settings(
name := "scalapy-tensorflow-cross",
// scalapy-core version will replace the one in scalapy-numpy (maintaining binary compatibility)
libraryDependencies += "com.github.VirtuslabRnD.scalapy" %%% "scalapy-core" % "b3d8ddc81753a72d11e46601f7a9ad719e452e5c",
libraryDependencies += "com.github.VirtuslabRnD.scalapy-numpy" %%% "scalapy-numpy" % "8a85b7068e9a377ba4b97e2cf9b7bbe008066202" exclude("me.shadaj", "scalapy-core"),
libraryDependencies += "me.shadaj" %%% "scalapy-core" % "0.4.0+11-aea09719",
libraryDependencies += "me.shadaj" %%% "scalapy-numpy" % "0.1.0+13-442717a6+20210113-0045" excludeAll("com.github.shadaj.scalapy", "scalapy-core_2.13"),
projectDependencies ~=(_.map(_.withDottyCompat(dottyVersion))),
).jvmSettings(
scalaVersion := scala213Version,
Expand Down Expand Up @@ -122,7 +122,7 @@ lazy val scalaPyTensorFlowExamplesCross = crossProject(JVMPlatform, NativePlatfo
.dependsOn(scalaPyTensorFlowCross)

lazy val scalaPyTensorFlowExamplesJVM = scalaPyTensorFlowExamplesCross.jvm.settings(name := "tensorflow-example-jvm")
lazy val scalaPyTensorFlowExamplesNative = scalaPyTensorFlowExamplesCross.native.settings(name := "tensorflow-example-native")
lazy val scalaPyTensorFlowExamplesNative = scalaPyTensorFlowExamplesCross.native.settings(name := "tensorflow-example-native")


// To make sure that changes to project structure are picked up by sbt without an explicit `reload`
Expand Down
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
package me.shadaj.scalapy.tensorflow.example

import me.shadaj.scalapy.numpy.{NDArray, PythonSeq}
import me.shadaj.scalapy.py
import me.shadaj.scalapy.tensorflow.api.TensorFlow
import me.shadaj.scalapy.tensorflow.nd2Tensor
Expand All @@ -8,6 +9,24 @@ import me.shadaj.scalapy.tensorflow.api.keras.optimizers.OptimizerEnum
import me.shadaj.scalapy.tensorflow.scala.utils.Modules.{numpy => np}
import me.shadaj.scalapy.tensorflow.api.keras.metrics.Metric
import me.shadaj.scalapy.tensorflow.api.keras.activations.Activation
import me.shadaj.scalapy.py.SeqConverters
import me.shadaj.scalapy.tensorflow.ndd2Tensor
import me.shadaj.scalapy.py
import me.shadaj.scalapy.py.Dynamic.global
import me.shadaj.scalapy.readwrite._
import me.shadaj.scalapy.readwrite.Reader._
import me.shadaj.scalapy.py
import me.shadaj.scalapy.tensorflow.TensorFlow
import me.shadaj.scalapy.tensorflow.scala.utils.Modules
import me.shadaj.scalapy.tensorflow.nd2Tensor
import me.shadaj.scalapy.py.SeqConverters
import me.shadaj.scalapy.tensorflow.scala.utils.Modules._
import me.shadaj.scalapy.numpy.{NDArray, PythonSeq}

import Int.int2long
import scala.language.implicitConversions


import Int.int2long
import scala.language.implicitConversions

Expand All @@ -33,10 +52,10 @@ object BidirectionalLSTMExample extends Runnable {
println(s"${xTest.length} test sequences")

println("Pad sequences (samples x time)")
val xTrain1 = sequence.padSequences(xTrain, maxLen = Some(maxLen)).astype(np.float32)
val xTest1 = sequence.padSequences(xTest, maxLen = Some(maxLen)).astype(np.float32)
val yTrain1 = yTrain.astype(np.float32)
val yTest1 = yTest.astype(np.float32)
val xTrain1 = sequence.padSequences(xTrain, maxLen = Some(maxLen)).astype(np.float32).as[NDArray[Float]]
val xTest1 = sequence.padSequences(xTest, maxLen = Some(maxLen)).astype(np.float32).as[NDArray[Float]]
val yTrain1 = yTrain.astype(np.float32).as[NDArray[Float]]
val yTest1 = yTest.astype(np.float32).as[NDArray[Float]]

println(s"xTrain shape: ${xTrain1.shape}")
println(s"xTest shape: ${xTest1.shape}")
Expand All @@ -48,8 +67,8 @@ object BidirectionalLSTMExample extends Runnable {
layers.Dense(1, activation = Some(Activation.Sigmoid))
))

model.compile(OptimizerEnum.Adam, Some(keras1.backend.binaryCrossentropy), metrics = Seq(Metric.Accuracy))

model.compile(OptimizerEnum.Adam, Some(keras1.backend.binaryCrossentropy), metrics = Seq[String]("accuracy"))
println("Train...")
val epochs = Option(System.getenv("EPOCH_COUNT")).map(_.toInt).getOrElse(2)
model.fit(xTrain1, yTrain1, batchSize = Some(batchSize), epochs = epochs, validationData = Some((xTest1, yTest1)))
Expand Down
Original file line number Diff line number Diff line change
@@ -1,12 +1,17 @@
package me.shadaj.scalapy.tensorflow.example

import me.shadaj.scalapy.numpy.{NDArray, PythonSeq}
import me.shadaj.scalapy.py
import me.shadaj.scalapy.tensorflow.api.Tensor
import me.shadaj.scalapy.py.SeqConverters
import me.shadaj.scalapy.numpy.PythonSeq.seqToPythonSeq
import me.shadaj.scalapy.tensorflow.{Tensor => PyTensor}
import me.shadaj.scalapy.tensorflow.{Variable => PyVariable}
import me.shadaj.scalapy.tensorflow.api.scalaUtils.CloseableResourceManager
import me.shadaj.scalapy.tensorflow.api.Tensor.{TensorToPyTensor}
import me.shadaj.scalapy.tensorflow.api.Tensor.TensorToPyTensor
import me.shadaj.scalapy.tensorflow.{nd2Tensor => nd2TensorPy}
import me.shadaj.scalapy.tensorflow.api.{TensorFlow => tf}
import me.shadaj.scalapy.tensorflow.api.{Variable, Tensor, TensorFlow => tf}
import me.shadaj.scalapy.tensorflow.scala.utils.Modules.{numpy => np}

import scala.language.implicitConversions

/**
Expand All @@ -21,7 +26,7 @@ object GradientDescentOptimizerExample extends Runnable {

def run(): Unit = {
// Starting data
val xData = np.random.rand(100).astype(np.float32)
val xData = np.random.rand(100).astype(np.float32).as[NDArray[Float]]
val yData = (xData * 0.1f) + 0.3f

// Variables
Expand All @@ -35,11 +40,11 @@ object GradientDescentOptimizerExample extends Runnable {
def loss = () => tf.reduceMean(tf.square(y() - yData))

// Function to calculate gradients
def grad(): Option[(Tensor, Seq[Tensor])] =
def grad(): Option[(PyTensor, Seq[PyTensor])] =
CloseableResourceManager.withResource(tf.GradientTape()) { tape =>
val lossValue = loss()
val gradients: Seq[Tensor] = tape.gradient(lossValue, Seq(W, b))
(lossValue, gradients)
(lossValue.underlying, gradients.map(_.underlying))
}

// Select optimizer SGD
Expand All @@ -52,7 +57,9 @@ object GradientDescentOptimizerExample extends Runnable {
val num_epochs = Option(System.getenv("EPOCH_COUNT")).map(_.toInt).getOrElse(400)
for (epoch <- 1 to num_epochs) {
val (lossValue, grads) = grad().get
optimizer.applyGradients(grads.zip(Seq(W, b)))
val aa = grads.zip(Seq(W.underlying, b.underlying))
optimizer.applyGradients(aa)
//optimizer.applyGradients(grads.zip(Seq(W, b)))
if (epoch % 50 == 0)
println(s"Epoch ${epoch}: Loss: ${lossValue.numpy()}")
}
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,49 @@
package me.shadaj.scalapy.tensorflow.example

import me.shadaj.scalapy.py
import me.shadaj.scalapy.py.{Any, Dynamic, PyQuote, SeqConverters, local}
import me.shadaj.scalapy.readwrite.Reader._


/** Demonstrates ScalaPy lambda interop in both directions:
  * Scala closures exposed as Python callables, and a Python callable
  * (`len`) converted into a Scala function value.
  */
object LambdaExample extends Runnable {
  def run(): Unit = {
    println("Hello, World")

    // Exercise the Python built-in `len` on a proxied Scala list.
    // NOTE(review): the result is never read; kept only for its side effect
    // of round-tripping through the interpreter — confirm this is intended.
    val pyListLength = py.Dynamic.global.len(List(1, 2, 3).toPythonProxy)

    local {
      // Stateful Scala closure handed to Python: each call bumps the counter.
      var invocations = 0
      val counterLambda = Any.from { () =>
        invocations += 1
        s"count: $invocations"
      }

      // Stateless Scala function taking a Python sequence argument.
      val sumLambda = Any.from((xs: Seq[Int]) => xs.sum)

      assert(py"$counterLambda()".as[String] == "count: 1")
      assert(py"$counterLambda()".as[String] == "count: 2")
      assert(py"$sumLambda([1, 2, 3])".as[Int] == 6)
    }

    // Opposite direction: wrap Python's `len` as a Scala `Any => Int`.
    val pyLenAsScala = Dynamic.global.len.as[Any => Int]
    assert(pyLenAsScala(Seq[Any]().toPythonProxy) == 0)
    assert(pyLenAsScala(Seq(1, 2, 3).toPythonProxy) == 3)

    // --- Disabled `@py.native` trait experiments, kept for reference ---
    /*@py.native trait PyString extends py.Object {
      def count(subsequence: String): Int = py.native
    }*/

    /*{
      @py.native trait PythonRandomModule extends py.Object {
        def Random(a: String, s: Seq[Int]): py.Dynamic = py.native
      }

      val random = py.module("random").as[PythonRandomModule]
      //println(random.Random("123", 4))

      val string1 = py.module("string").digits.as[PyString]
      val string2 = py.module("string").digits.as[PyString]
      // string: PyString = 0123456789
      println(string1.count("123"))
    }*/
  }
}
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@ object Main {
case Array("BidirectionalLSTMExample") => BidirectionalLSTMExample
case Array("GradientDescentOptimizerExample") => GradientDescentOptimizerExample
case Array("MnistExample") => MnistExample
case Array("LambdaExample") => LambdaExample
case _ => throw new IllegalArgumentException("usage: sbt 'run <simple class name>'")
}).run()
}
Expand Down
Original file line number Diff line number Diff line change
@@ -1,13 +1,15 @@
package me.shadaj.scalapy.tensorflow.example

import me.shadaj.scalapy.numpy.NumPy
import me.shadaj.scalapy.numpy.{NDArray, NumPy, PythonSeq}
import me.shadaj.scalapy.py
import me.shadaj.scalapy.tensorflow.scala.utils.Modules._
import me.shadaj.scalapy.tensorflow.api.{TensorFlow => tf}
import me.shadaj.scalapy.tensorflow.api.keras.datasets.Mnist
import me.shadaj.scalapy.tensorflow.api.keras.models._
import me.shadaj.scalapy.tensorflow.api.keras.metrics.Metric
import me.shadaj.scalapy.tensorflow.api.keras.activations.Activation
import me.shadaj.scalapy.py.SeqConverters

import Int.int2long
import scala.language.implicitConversions

Expand All @@ -28,8 +30,8 @@ object MnistExample extends Runnable {
val mnist: Mnist = kerasA.datasets.mnist
val ((xTrainOrig, yTrainOrig), (xTest, yTest)) = mnist.loadData()
val trainingSetSize = Option(System.getenv("TRAINING_SET_SIZE")).map(_.toInt)
val xTrain = trainingSetSize.map(tss => xTrainOrig.slice(0, tss)).getOrElse(xTrainOrig)
val yTrain = trainingSetSize.map(tss => yTrainOrig.slice(0, tss)).getOrElse(yTrainOrig)
val xTrain = trainingSetSize.map(tss => xTrainOrig.slice(0, tss)).getOrElse(xTrainOrig).asInstanceOf[NDArray[Long]]
val yTrain = trainingSetSize.map(tss => yTrainOrig.slice(0, tss)).getOrElse(yTrainOrig).asInstanceOf[NDArray[Long]]

val (train, test, inputShape) =
if (K.imageDataFormat() == "channels_first") {
Expand All @@ -46,8 +48,8 @@ object MnistExample extends Runnable {
(train, test, inputShape)
}

val trainImages = train.astype(np.float32) / 255.0f
val testImages = test.astype(np.float32) / 255.0f
val trainImages = train.astype(np.float32).as[NDArray[Float]] / 255.0f
val testImages = test.astype(np.float32).as[NDArray[Float]] / 255.0f

println(s"xTrain shape: ${trainImages.shape}")
println(s"${trainImages.shape(0)} train samples")
Expand All @@ -70,12 +72,13 @@ object MnistExample extends Runnable {
model.compile(
loss = Some(kerasA.losses.categoricalCrossentropy),
optimizer = kerasA.optimizers.Adadelta(),
metrics = Seq(Metric.Accuracy)
metrics = Seq[String](Metric.Accuracy)
)

model.fit(x = trainImages, y = trainLabels, batchSize = Some(batchSize), epochs = epochs, verbose = 1, validationData = Some((testImages, testLabels)))
model.fit(x = trainImages, y = trainLabels.as[NDArray[Float]], batchSize = Some(batchSize), epochs = epochs, verbose = 1,
validationData = Some((testImages.as[NDArray[Float]], testLabels.as[NDArray[Float]])))

val score = model.evaluate(x = testImages, y = testLabels, verbose = 0)
val score = model.evaluate(x = testImages, y = testLabels.as[NDArray[Float]], verbose = 0)

println(s"Test loss: ${score(0)}")
println(s"Test accuracy: ${score(1)}")
Expand Down
2 changes: 1 addition & 1 deletion dotty-tensorflow/project/build.properties
Original file line number Diff line number Diff line change
@@ -1 +1 @@
sbt.version=1.3.13
sbt.version=1.4.6
2 changes: 2 additions & 0 deletions dotty-tensorflow/project/plugins.sbt
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@

addSbtPlugin("ch.epfl.lamp" % "sbt-dotty" % "0.5.1")
Original file line number Diff line number Diff line change
@@ -1,19 +1,26 @@
package me.shadaj.scalapy.tensorflow.api

import me.shadaj.scalapy.tensorflow.scala.utils.Modules.{tensorflow => pyTensorflow}
import me.shadaj.scalapy.tensorflow.{TensorFlow => PyTensorFlow, Tensor => PyTensor}
import me.shadaj.scalapy.tensorflow.{TensorFlow => PyTensorFlow}
import me.shadaj.scalapy.numpy.PythonSeq.seqToPythonSeq
import me.shadaj.scalapy.tensorflow.seq2Tensor
import random.Random
import keras.Keras
import compat.Compat
import nn.NN
import train.Train

import scala.language.implicitConversions
import scalaUtils.PythonOption._
import scalaUtils.PythonUnion._
import me.shadaj.scalapy.tensorflow.seq2Tensor

import me.shadaj.scalapy.py.PyFunction
import me.shadaj.scalapy.py
import me.shadaj.scalapy.py.|.fromLeft
import me.shadaj.scalapy.py.|.fromRight
import me.shadaj.scalapy.tensorflow.api.scalaUtils.PythonUnion.fromSingleAndTupleUnion
import me.shadaj.scalapy.tensorflow.api.Tensor.TensorToPyTensor
import me.shadaj.scalapy.tensorflow.api.Tensor.PyTensorToTensor


object TensorFlow {
private val tf: PyTensorFlow = pyTensorflow
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,10 @@ package me.shadaj.scalapy.tensorflow.api.keras.layers
import me.shadaj.scalapy.tensorflow.keras.layers.{Conv2D => PyConv2D}
import me.shadaj.scalapy.tensorflow.api.scalaUtils.PythonUnion._
import me.shadaj.scalapy.tensorflow.api.scalaUtils.PythonOption._

import scala.language.implicitConversions
import me.shadaj.scalapy.tensorflow.api.scalaUtils
import me.shadaj.scalapy.py.|

class Conv2D private[api] (override val underlying: PyConv2D) extends Layer(underlying) {
def filters: Int = underlying.filters
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,12 +5,22 @@ import me.shadaj.scalapy.tensorflow.api.scalaUtils.PythonOption._
import me.shadaj.scalapy.tensorflow.api.scalaUtils.PythonUnion._
import me.shadaj.scalapy.tensorflow.scala.utils.Modules.{numpy => np}
import me.shadaj.scalapy.tensorflow.api.scalaUtils.PythonEnum._
import me.shadaj.scalapy.tensorflow.api.scalaUtils.PythonEnum
import me.shadaj.scalapy.tensorflow.api.keras.layers._
import me.shadaj.scalapy.tensorflow.api.keras.optimizers._
import me.shadaj.scalapy.numpy.NDArray
import me.shadaj.scalapy.py.PyFunction

import scala.language.implicitConversions
import me.shadaj.scalapy.numpy.PythonSeq
import me.shadaj.scalapy.py
import me.shadaj.scalapy.py.SeqConverters
import me.shadaj.scalapy.tensorflow.TensorFlow
import me.shadaj.scalapy.tensorflow.nd2Tensor
import me.shadaj.scalapy.py.SeqConverters
import me.shadaj.scalapy.tensorflow.scala.utils.Modules._
import me.shadaj.scalapy.numpy.{NDArray, PythonSeq}
import me.shadaj.scalapy.tensorflow.api.scalaUtils.PythonOption.toScalaOption
import me.shadaj.scalapy.readwrite.Writer

class Sequential private[api] (val underlying: PySequential) extends PythonType[PySequential] {

Expand All @@ -19,11 +29,11 @@ class Sequential private[api] (val underlying: PySequential) extends PythonType[
def compile(
optimizer: OptimizerEnum | Optimizer = OptimizerEnum.RMSprop,
loss: Option[PyFunction] = None,
metrics: Seq[String] = Seq.empty,
lossWeights: Option[Seq[(Double, Double)]] = None,
sampleWeightMode: Option[String] = None,
weightedMetrics: Seq[String] = Seq.empty,
targetTensors: Option[String] = None
metrics: PythonSeq[String] = PythonSeq.emptyString,
lossWeights: py.NoneOr[PythonSeq[(Double, Double)]] = py.None,
sampleWeightMode: py.NoneOr[String] = None,
weightedMetrics: PythonSeq[String] = PythonSeq.emptyString,
targetTensors: py.NoneOr[String] = None
) =
underlying.compile(
optimizer,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,18 +2,21 @@ package me.shadaj.scalapy.tensorflow.api.keras.optimizers

import me.shadaj.scalapy.tensorflow.compat.v1.Operation
import me.shadaj.scalapy.tensorflow.api.{Tensor, Variable}

import me.shadaj.scalapy.tensorflow.{Variable => PyVariable}
import me.shadaj.scalapy.tensorflow.{Tensor => PyTensor}
import me.shadaj.scalapy.tensorflow.keras.optimizers.{Optimizer => PyOptimizer}
import me.shadaj.scalapy.tensorflow.api.scalaUtils.PythonType
import me.shadaj.scalapy.py.SeqConverters
import me.shadaj.scalapy.numpy.PythonSeq

import scala.language.implicitConversions
import me.shadaj.scalapy.py

// Typed Scala facade over a Python Keras `Optimizer` object.
// NOTE(review): this span is rendered-diff residue — the first `applyGradients`
// below is the pre-change signature and the second is its replacement; only one
// of the two should survive in the merged source. Confirm against the PR's
// post-merge file before relying on either signature.
trait Optimizer private[api] (val underlying: PyOptimizer) extends PythonType[PyOptimizer] {
// TODO loss should be a function () => py.Any
// Delegates to the Python optimizer's `minimize`, unwrapping each Scala
// `Variable` to its underlying Python object.
def minimize(loss: py.Any, varList: Seq[Variable]): Operation = underlying.minimize(loss, varList.map(_.underlying))

// Old form: takes Scala wrapper types and unwraps each (tensor, variable)
// pair before delegating to Python `apply_gradients`.
def applyGradients(gradsAndVars: Seq[(Tensor, Variable)]): Operation =
underlying.apply_gradients(gradsAndVars.map {
case (tensor, variable) => (tensor.underlying, variable.underlying)
})
// New form: takes already-unwrapped Python types as a `PythonSeq` and
// passes them through directly — callers now unwrap at the call site.
def applyGradients(gradsAndVars: PythonSeq[(PyTensor, PyVariable)]): Operation = {
underlying.apply_gradients(gradsAndVars)
}
}
Loading