diff --git a/scala-package/examples/src/main/scala/org/apache/mxnetexamples/neuralstyle/NeuralStyle.scala b/scala-package/examples/src/main/scala/org/apache/mxnetexamples/neuralstyle/NeuralStyle.scala index 22a1d269a9bf..e11c1c6d7027 100644 --- a/scala-package/examples/src/main/scala/org/apache/mxnetexamples/neuralstyle/NeuralStyle.scala +++ b/scala-package/examples/src/main/scala/org/apache/mxnetexamples/neuralstyle/NeuralStyle.scala @@ -155,104 +155,117 @@ object NeuralStyle { Math.sqrt(array.map(x => x * x).sum.toDouble).toFloat } - def main(args: Array[String]): Unit = { - val alle = new NeuralStyle - val parser: CmdLineParser = new CmdLineParser(alle) - try { - parser.parseArgument(args.toList.asJava) - assert(alle.contentImage != null && alle.styleImage != null - && alle.modelPath != null && alle.outputDir != null) + //scalastyle:off + def runTraining(model : String, contentImage : String, styleImage: String, dev : Context, + modelPath : String, outputDir : String, styleWeight : Float, + contentWeight : Float, tvWeight : Float, gaussianRadius : Int, + lr: Float, maxNumEpochs: Int, maxLongEdge: Int, + saveEpochs : Int, stopEps: Float) : Unit = { + + val contentNp = preprocessContentImage(contentImage, maxLongEdge, dev) + val styleNp = preprocessStyleImage(styleImage, contentNp.shape, dev) + val size = (contentNp.shape(2), contentNp.shape(3)) + + val (style, content) = ModelVgg19.getSymbol + val (gram, gScale) = styleGramSymbol(size, style) + var modelExecutor = ModelVgg19.getExecutor(gram, content, modelPath, size, dev) + + modelExecutor.data.set(styleNp) + modelExecutor.executor.forward() + + val styleArray = modelExecutor.style.map(_.copyTo(Context.cpu())) + modelExecutor.data.set(contentNp) + modelExecutor.executor.forward() + val contentArray = modelExecutor.content.copyTo(Context.cpu()) + + // delete the executor + modelExecutor = null + + val (styleLoss, contentLoss) = getLoss(gram, content) + modelExecutor = ModelVgg19.getExecutor( + styleLoss, contentLoss, modelPath, size, dev) + + val gradArray = { + var tmpGA = Array[NDArray]() + for (i <- 0 until styleArray.length) { + modelExecutor.argDict(s"target_gram_$i").set(styleArray(i)) + tmpGA = tmpGA :+ NDArray.ones(Shape(1), dev) * (styleWeight / gScale(i)) + } + tmpGA :+ NDArray.ones(Shape(1), dev) * contentWeight + } - val dev = if (alle.gpu >= 0) Context.gpu(alle.gpu) else Context.cpu(0) - val contentNp = preprocessContentImage(alle.contentImage, alle.maxLongEdge, dev) - val styleNp = preprocessStyleImage(alle.styleImage, contentNp.shape, dev) - val size = (contentNp.shape(2), contentNp.shape(3)) + modelExecutor.argDict("target_content").set(contentArray) - val (style, content) = ModelVgg19.getSymbol - val (gram, gScale) = styleGramSymbol(size, style) - var modelExecutor = ModelVgg19.getExecutor(gram, content, alle.modelPath, size, dev) + // train + val img = Random.uniform(-0.1f, 0.1f, contentNp.shape, dev) + val lrFS = new FactorScheduler(step = 10, factor = 0.9f) - modelExecutor.data.set(styleNp) - modelExecutor.executor.forward() + saveImage(contentNp, s"${outputDir}/input.jpg", gaussianRadius) + saveImage(styleNp, s"${outputDir}/style.jpg", gaussianRadius) - val styleArray = modelExecutor.style.map(_.copyTo(Context.cpu())) - modelExecutor.data.set(contentNp) - modelExecutor.executor.forward() - val contentArray = modelExecutor.content.copyTo(Context.cpu()) + val optimizer = new Adam( + learningRate = lr, + wd = 0.005f, + lrScheduler = lrFS) + val optimState = optimizer.createState(0, img) - // delete the executor - 
modelExecutor = null + logger.info(s"start training arguments") - val (styleLoss, contentLoss) = getLoss(gram, content) - modelExecutor = ModelVgg19.getExecutor( - styleLoss, contentLoss, alle.modelPath, size, dev) + var oldImg = img.copyTo(dev) + val clipNorm = img.shape.toVector.reduce(_ * _) + val tvGradExecutor = getTvGradExecutor(img, dev, tvWeight) + var eps = 0f + var trainingDone = false + var e = 0 + while (e < maxNumEpochs && !trainingDone) { + modelExecutor.data.set(img) + modelExecutor.executor.forward() + modelExecutor.executor.backward(gradArray) - val gradArray = { - var tmpGA = Array[NDArray]() - for (i <- 0 until styleArray.length) { - modelExecutor.argDict(s"target_gram_$i").set(styleArray(i)) - tmpGA = tmpGA :+ NDArray.ones(Shape(1), dev) * (alle.styleWeight / gScale(i)) - } - tmpGA :+ NDArray.ones(Shape(1), dev) * alle.contentWeight + val gNorm = NDArray.norm(modelExecutor.dataGrad).toScalar + if (gNorm > clipNorm) { + modelExecutor.dataGrad.set(modelExecutor.dataGrad * (clipNorm / gNorm)) } - - modelExecutor.argDict("target_content").set(contentArray) - - // train - val img = Random.uniform(-0.1f, 0.1f, contentNp.shape, dev) - val lr = new FactorScheduler(step = 10, factor = 0.9f) - - saveImage(contentNp, s"${alle.outputDir}/input.jpg", alle.guassianRadius) - saveImage(styleNp, s"${alle.outputDir}/style.jpg", alle.guassianRadius) - - val optimizer = new Adam( - learningRate = alle.lr, - wd = 0.005f, - lrScheduler = lr) - val optimState = optimizer.createState(0, img) - - logger.info(s"start training arguments $alle") - - var oldImg = img.copyTo(dev) - val clipNorm = img.shape.toVector.reduce(_ * _) - val tvGradExecutor = getTvGradExecutor(img, dev, alle.tvWeight) - var eps = 0f - var trainingDone = false - var e = 0 - while (e < alle.maxNumEpochs && !trainingDone) { - modelExecutor.data.set(img) - modelExecutor.executor.forward() - modelExecutor.executor.backward(gradArray) - - val gNorm = NDArray.norm(modelExecutor.dataGrad).toScalar - if (gNorm > clipNorm) { - modelExecutor.dataGrad.set(modelExecutor.dataGrad * (clipNorm / gNorm)) - } - tvGradExecutor match { - case Some(executor) => { - executor.forward() - optimizer.update(0, img, - modelExecutor.dataGrad + executor.outputs(0), - optimState) - } - case None => - optimizer.update(0, img, modelExecutor.dataGrad, optimState) + tvGradExecutor match { + case Some(executor) => { + executor.forward() + optimizer.update(0, img, + modelExecutor.dataGrad + executor.outputs(0), + optimState) } - eps = (NDArray.norm(oldImg - img) / NDArray.norm(img)).toScalar - oldImg.set(img) - logger.info(s"epoch $e, relative change $eps") + case None => + optimizer.update(0, img, modelExecutor.dataGrad, optimState) + } + eps = (NDArray.norm(oldImg - img) / NDArray.norm(img)).toScalar + oldImg.set(img) + logger.info(s"epoch $e, relative change $eps") - if (eps < alle.stopEps) { - logger.info("eps < args.stop_eps, training finished") - trainingDone = true - } - if ((e + 1) % alle.saveEpochs == 0) { - saveImage(img, s"${alle.outputDir}/tmp_${e + 1}.jpg", alle.guassianRadius) - } - e = e + 1 + if (eps < stopEps) { + logger.info("eps < args.stop_eps, training finished") + trainingDone = true + } + if ((e + 1) % saveEpochs == 0) { + saveImage(img, s"${outputDir}/tmp_${e + 1}.jpg", gaussianRadius) } - saveImage(img, s"${alle.outputDir}/out.jpg", alle.guassianRadius) - logger.info("Finish fit ...") + e = e + 1 + } + saveImage(img, s"${outputDir}/out.jpg", gaussianRadius) + logger.info("Finish fit ...") + } + + def main(args: Array[String]): Unit = 
{ + val alle = new NeuralStyle + val parser: CmdLineParser = new CmdLineParser(alle) + try { + parser.parseArgument(args.toList.asJava) + assert(alle.contentImage != null && alle.styleImage != null + && alle.modelPath != null && alle.outputDir != null) + + val dev = if (alle.gpu >= 0) Context.gpu(alle.gpu) else Context.cpu(0) + runTraining(alle.model, alle.contentImage, alle.styleImage, dev, alle.modelPath, + alle.outputDir, alle.styleWeight, alle.contentWeight, alle.tvWeight, + alle.gaussianRadius, alle.lr, alle.maxNumEpochs, alle.maxLongEdge, + alle.saveEpochs, alle.stopEps) } catch { case ex: Exception => { logger.error(ex.getMessage, ex) @@ -292,6 +305,6 @@ class NeuralStyle { private val outputDir: String = null @Option(name = "--save-epochs", usage = "save the output every n epochs") private val saveEpochs: Int = 50 - @Option(name = "--guassian-radius", usage = "the gaussian blur filter radius") - private val guassianRadius: Int = 1 + @Option(name = "--gaussian-radius", usage = "the gaussian blur filter radius") + private val gaussianRadius: Int = 1 } diff --git a/scala-package/examples/src/main/scala/org/apache/mxnetexamples/neuralstyle/README.md b/scala-package/examples/src/main/scala/org/apache/mxnetexamples/neuralstyle/README.md new file mode 100644 index 000000000000..fe849343c9d7 --- /dev/null +++ b/scala-package/examples/src/main/scala/org/apache/mxnetexamples/neuralstyle/README.md @@ -0,0 +1,83 @@ +# Neural Style Example for Scala + +## Introduction +This model contains three important components: +- Boost Inference +- Boost Training +- Neural Style conversion + +You can use the prebuilt VGG model to do the conversion. +By adding a style image, you can create several interesting images. + +Original Image | Style Image +:-------------------------:|:-------------------------: +![](https://s3.us-east-2.amazonaws.com/mxnet-scala/scala-example-ci/NeuralStyle/IMG_4343.jpg) | ![](https://s3.us-east-2.amazonaws.com/mxnet-scala/scala-example-ci/NeuralStyle/starry_night.jpg) + +Boost Inference Image (pretrained) | Epoch 150 Image +:-------------------------:|:-------------------------: +![](https://s3.us-east-2.amazonaws.com/mxnet-scala/scala-example-ci/NeuralStyle/out_3.jpg) | ![](https://s3.us-east-2.amazonaws.com/mxnet-scala/scala-example-ci/NeuralStyle/tmp_150.jpg) + +## Setup +Please download the input image and style image following the links below: + +Input image +```bash +https://s3.us-east-2.amazonaws.com/mxnet-scala/scala-example-ci/NeuralStyle/IMG_4343.jpg +``` +Style image +```bash +https://s3.us-east-2.amazonaws.com/mxnet-scala/scala-example-ci/NeuralStyle/starry_night.jpg +``` + +VGG model --Boost inference +```bash +https://s3.us-east-2.amazonaws.com/mxnet-scala/scala-example-ci/NeuralStyle/model.zip +``` + +VGG model --Boost Training +```bash +https://s3.us-east-2.amazonaws.com/mxnet-scala/scala-example-ci/NeuralStyle/vgg19.params +``` + +Please unzip the model before you use it. + +## Boost Inference Example + +Please provide the corresponding arguments before you execute the program +```bash +--input-image +/IMG_4343.jpg +--model-path +/model +--output-path + +``` + +## Boost Training Example +Please download your own training data for boost training. +You can use 26k images sampled from [MIT Place dataset](http://places.csail.mit.edu/). 
+```bash +--style-image +/starry_night.jpg +--data-path +/images +--vgg-model-path +/vgg19.params +--save-model-path + +``` + +## NeuralStyle Example +Please provide the corresponding arguments before you execute the program +```bash +--model-path +/vgg19.params +--content-image +/IMG_4343.jpg +--style-image +/starry_night.jpg +--gpu + +--output-dir + +``` \ No newline at end of file diff --git a/scala-package/examples/src/main/scala/org/apache/mxnetexamples/neuralstyle/end2end/BoostTrain.scala b/scala-package/examples/src/main/scala/org/apache/mxnetexamples/neuralstyle/end2end/BoostTrain.scala index eb7007a1ce4f..08b4c85d2c55 100644 --- a/scala-package/examples/src/main/scala/org/apache/mxnetexamples/neuralstyle/end2end/BoostTrain.scala +++ b/scala-package/examples/src/main/scala/org/apache/mxnetexamples/neuralstyle/end2end/BoostTrain.scala @@ -54,130 +54,135 @@ object BoostTrain { out.bind(ctx, Map("img" -> img, "kernel" -> kernel)) } - def main(args: Array[String]): Unit = { - val stin = new BoostTrain - val parser: CmdLineParser = new CmdLineParser(stin) - try { - parser.parseArgument(args.toList.asJava) - assert(stin.dataPath != null - && stin.vggModelPath != null - && stin.saveModelPath != null - && stin.styleImage != null) - // params - val vggParams = NDArray.load2Map(stin.vggModelPath) - val styleWeight = 1.2f - val contentWeight = 10f - val dShape = Shape(1, 3, 384, 384) - val clipNorm = 0.05f * dShape.product - val modelPrefix = "v3" - val ctx = if (stin.gpu == -1) Context.cpu() else Context.gpu(stin.gpu) - - // init style - val styleNp = DataProcessing.preprocessStyleImage(stin.styleImage, dShape, ctx) - var styleMod = Basic.getStyleModule("style", dShape, ctx, vggParams) - styleMod.forward(Array(styleNp)) - val styleArray = styleMod.getOutputs().map(_.copyTo(Context.cpu())) - styleMod.dispose() - styleMod = null - - // content - val contentMod = Basic.getContentModule("content", dShape, ctx, vggParams) - - // loss - val (loss, gScale) = Basic.getLossModule("loss", dShape, ctx, vggParams) - val extraArgs = (0 until styleArray.length) - .map( i => s"target_gram_$i" -> styleArray(i)).toMap - loss.setParams(extraArgs) - var gradArray = Array[NDArray]() - for (i <- 0 until styleArray.length) { - gradArray = gradArray :+ (NDArray.ones(Shape(1), ctx) * (styleWeight / gScale(i))) - } - gradArray = gradArray :+ (NDArray.ones(Shape(1), ctx) * contentWeight) - - // generator - val gens = Array( - GenV4.getModule("g0", dShape, ctx), - GenV3.getModule("g1", dShape, ctx), - GenV3.getModule("g2", dShape, ctx), - GenV4.getModule("g3", dShape, ctx) - ) - gens.foreach { gen => - val opt = new SGD(learningRate = 1e-4f, - momentum = 0.9f, - wd = 5e-3f, - clipGradient = 5f) - gen.initOptimizer(opt) - } + def runTraining(dataPath : String, vggModelPath: String, ctx : Context, + styleImage : String, saveModelPath : String) : Unit = { + // params + val vggParams = NDArray.load2Map(vggModelPath) + val styleWeight = 1.2f + val contentWeight = 10f + val dShape = Shape(1, 3, 384, 384) + val clipNorm = 0.05f * dShape.product + val modelPrefix = "v3" + // init style + val styleNp = DataProcessing.preprocessStyleImage(styleImage, dShape, ctx) + var styleMod = Basic.getStyleModule("style", dShape, ctx, vggParams) + styleMod.forward(Array(styleNp)) + val styleArray = styleMod.getOutputs().map(_.copyTo(Context.cpu())) + styleMod.dispose() + styleMod = null + + // content + val contentMod = Basic.getContentModule("content", dShape, ctx, vggParams) + + // loss + val (loss, gScale) = Basic.getLossModule("loss", 
dShape, ctx, vggParams) + val extraArgs = (0 until styleArray.length) + .map( i => s"target_gram_$i" -> styleArray(i)).toMap + loss.setParams(extraArgs) + var gradArray = Array[NDArray]() + for (i <- 0 until styleArray.length) { + gradArray = gradArray :+ (NDArray.ones(Shape(1), ctx) * (styleWeight / gScale(i))) + } + gradArray = gradArray :+ (NDArray.ones(Shape(1), ctx) * contentWeight) + + // generator + val gens = Array( + GenV4.getModule("g0", dShape, ctx), + GenV3.getModule("g1", dShape, ctx), + GenV3.getModule("g2", dShape, ctx), + GenV4.getModule("g3", dShape, ctx) + ) + gens.foreach { gen => + val opt = new SGD(learningRate = 1e-4f, + momentum = 0.9f, + wd = 5e-3f, + clipGradient = 5f) + gen.initOptimizer(opt) + } - var filelist = new File(stin.dataPath).list().toList - val numImage = filelist.length - logger.info(s"Dataset size: $numImage") + var filelist = new File(dataPath).list().toList + val numImage = filelist.length + logger.info(s"Dataset size: $numImage") - val tvWeight = 1e-2f + val tvWeight = 1e-2f - val startEpoch = 0 - val endEpoch = 3 + val startEpoch = 0 + val endEpoch = 3 - for (k <- 0 until gens.length) { - val path = new File(s"${stin.saveModelPath}/$k") - if (!path.exists()) path.mkdir() - } + for (k <- 0 until gens.length) { + val path = new File(s"${saveModelPath}/$k") + if (!path.exists()) path.mkdir() + } - // train - for (i <- startEpoch until endEpoch) { - filelist = Random.shuffle(filelist) - for (idx <- filelist.indices) { - var dataArray = Array[NDArray]() - var lossGradArray = Array[NDArray]() - val data = - DataProcessing.preprocessContentImage(s"${stin.dataPath}/${filelist(idx)}", dShape, ctx) - dataArray = dataArray :+ data - // get content - contentMod.forward(Array(data)) - // set target content - loss.setParams(Map("target_content" -> contentMod.getOutputs()(0))) - // gen_forward - for (k <- 0 until gens.length) { - gens(k).forward(dataArray.takeRight(1)) - dataArray = dataArray :+ gens(k).getOutputs()(0) - // loss forward - loss.forward(dataArray.takeRight(1)) - loss.backward(gradArray) - lossGradArray = lossGradArray :+ loss.getInputGrads()(0) - } - val grad = NDArray.zeros(data.shape, ctx) - for (k <- gens.length - 1 to 0 by -1) { - val tvGradExecutor = getTvGradExecutor(gens(k).getOutputs()(0), ctx, tvWeight) - tvGradExecutor.forward() - grad += lossGradArray(k) + tvGradExecutor.outputs(0) - val gNorm = NDArray.norm(grad) - if (gNorm.toScalar > clipNorm) { - grad *= clipNorm / gNorm.toScalar - } - gens(k).backward(Array(grad)) - gens(k).update() - gNorm.dispose() - tvGradExecutor.dispose() + // train + for (i <- startEpoch until endEpoch) { + filelist = Random.shuffle(filelist) + for (idx <- filelist.indices) { + var dataArray = Array[NDArray]() + var lossGradArray = Array[NDArray]() + val data = + DataProcessing.preprocessContentImage(s"${dataPath}/${filelist(idx)}", dShape, ctx) + dataArray = dataArray :+ data + // get content + contentMod.forward(Array(data)) + // set target content + loss.setParams(Map("target_content" -> contentMod.getOutputs()(0))) + // gen_forward + for (k <- 0 until gens.length) { + gens(k).forward(dataArray.takeRight(1)) + dataArray = dataArray :+ gens(k).getOutputs()(0) + // loss forward + loss.forward(dataArray.takeRight(1)) + loss.backward(gradArray) + lossGradArray = lossGradArray :+ loss.getInputGrads()(0) + } + val grad = NDArray.zeros(data.shape, ctx) + for (k <- gens.length - 1 to 0 by -1) { + val tvGradExecutor = getTvGradExecutor(gens(k).getOutputs()(0), ctx, tvWeight) + tvGradExecutor.forward() + grad += 
lossGradArray(k) + tvGradExecutor.outputs(0) + val gNorm = NDArray.norm(grad) + if (gNorm.toScalar > clipNorm) { + grad *= clipNorm / gNorm.toScalar } - grad.dispose() - if (idx % 20 == 0) { - logger.info(s"Epoch $i: Image $idx") - for (k <- 0 until gens.length) { - val n = NDArray.norm(gens(k).getInputGrads()(0)) - logger.info(s"Data Norm : ${n.toScalar / dShape.product}") - n.dispose() - } + gens(k).backward(Array(grad)) + gens(k).update() + gNorm.dispose() + tvGradExecutor.dispose() + } + grad.dispose() + if (idx % 20 == 0) { + logger.info(s"Epoch $i: Image $idx") + for (k <- 0 until gens.length) { + val n = NDArray.norm(gens(k).getInputGrads()(0)) + logger.info(s"Data Norm : ${n.toScalar / dShape.product}") + n.dispose() } - if (idx % 1000 == 0) { - for (k <- 0 until gens.length) { - gens(k).saveParams( - s"${stin.saveModelPath}/$k/${modelPrefix}_" + - s"${"%04d".format(i)}-${"%07d".format(idx)}.params") - } + } + if (idx % 1000 == 0) { + for (k <- 0 until gens.length) { + gens(k).saveParams( + s"${saveModelPath}/$k/${modelPrefix}_" + + s"${"%04d".format(i)}-${"%07d".format(idx)}.params") } - data.dispose() } + data.dispose() } + } + } + + def main(args: Array[String]): Unit = { + val stin = new BoostTrain + val parser: CmdLineParser = new CmdLineParser(stin) + try { + parser.parseArgument(args.toList.asJava) + assert(stin.dataPath != null + && stin.vggModelPath != null + && stin.saveModelPath != null + && stin.styleImage != null) + + val ctx = if (stin.gpu == -1) Context.cpu() else Context.gpu(stin.gpu) + runTraining(stin.dataPath, stin.vggModelPath, ctx, stin.styleImage, stin.saveModelPath) } catch { case ex: Exception => { logger.error(ex.getMessage, ex) diff --git a/scala-package/examples/src/test/scala/org/apache/mxnetexamples/gan/GanExampleSuite.scala b/scala-package/examples/src/test/scala/org/apache/mxnetexamples/gan/GanExampleSuite.scala index 12459fb1cc19..967a408c685d 100644 --- a/scala-package/examples/src/test/scala/org/apache/mxnetexamples/gan/GanExampleSuite.scala +++ b/scala-package/examples/src/test/scala/org/apache/mxnetexamples/gan/GanExampleSuite.scala @@ -49,7 +49,7 @@ class GanExampleSuite extends FunSuite with BeforeAndAfterAll{ val context = Context.gpu() - val output = GanMnist.runTraining(modelDirPath, context, modelDirPath, 5) + val output = GanMnist.runTraining(modelDirPath, context, modelDirPath, 2) Process("rm -rf " + modelDirPath) ! 
assert(output >= 0.0f) diff --git a/scala-package/examples/src/test/scala/org/apache/mxnetexamples/neuralstyle/NeuralStyleSuite.scala b/scala-package/examples/src/test/scala/org/apache/mxnetexamples/neuralstyle/NeuralStyleSuite.scala index a59a97780ee2..1b657e8ebdac 100644 --- a/scala-package/examples/src/test/scala/org/apache/mxnetexamples/neuralstyle/NeuralStyleSuite.scala +++ b/scala-package/examples/src/test/scala/org/apache/mxnetexamples/neuralstyle/NeuralStyleSuite.scala @@ -22,7 +22,7 @@ import java.net.URL import org.apache.commons.io.FileUtils import org.apache.mxnet.Context -import org.apache.mxnetexamples.neuralstyle.end2end.BoostInference +import org.apache.mxnetexamples.neuralstyle.end2end.{BoostInference, BoostTrain} import org.scalatest.{BeforeAndAfterAll, FunSuite} import org.slf4j.LoggerFactory @@ -43,24 +43,59 @@ class NeuralStyleSuite extends FunSuite with BeforeAndAfterAll { } } - test("Example CI: Test Boost Inference") { + override def beforeAll(): Unit = { logger.info("Downloading vgg model") val tempDirPath = System.getProperty("java.io.tmpdir") logger.info("tempDirPath: %s".format(tempDirPath)) val baseUrl = "https://s3.us-east-2.amazonaws.com/mxnet-scala/scala-example-ci/NeuralStyle/" downloadUrl(baseUrl + "IMG_4343.jpg", tempDirPath + "/NS/IMG_4343.jpg") + downloadUrl(baseUrl + "starry_night.jpg", tempDirPath + "/NS/starry_night.jpg") downloadUrl(baseUrl + "model.zip", tempDirPath + "/NS/model.zip") + downloadUrl(baseUrl + "vgg19.params", tempDirPath + "/NS/vgg19.params") + // TODO: Need to confirm with Windows + Process(s"unzip $tempDirPath/NS/model.zip -d $tempDirPath/NS/") ! + + Process(s"mkdir $tempDirPath/NS/images") ! + + for (i <- 0 until 20) { + Process(s"cp $tempDirPath/NS/IMG_4343.jpg $tempDirPath/NS/images/img$i.jpg") ! + } + } + + test("Example CI: Test Boost Inference") { + val tempDirPath = System.getProperty("java.io.tmpdir") var ctx = Context.cpu() if (System.getenv().containsKey("SCALA_TEST_ON_GPU") && System.getenv("SCALA_TEST_ON_GPU").toInt == 1) { ctx = Context.gpu() } - - // TODO: Need to confirm with Windows - Process("unzip " + tempDirPath + "/NS/model.zip -d " - + tempDirPath + "/NS/") ! - BoostInference.runInference(tempDirPath + "/NS/model", tempDirPath + "/NS", 2, tempDirPath + "/NS/IMG_4343.jpg", ctx) } + + test("Example CI: Test Boost Training") { + val tempDirPath = System.getProperty("java.io.tmpdir") + if (System.getenv().containsKey("SCALA_TEST_ON_GPU") && + System.getenv("SCALA_TEST_ON_GPU").toInt == 1) { + val ctx = Context.gpu() + BoostTrain.runTraining(tempDirPath + "/NS/images", tempDirPath + "/NS/vgg19.params", ctx, + tempDirPath + "/NS/starry_night.jpg", tempDirPath + "/NS") + } else { + logger.info("GPU test only, skip CPU...") + } + } + + test("Example CI: Test Neural Style") { + val tempDirPath = System.getProperty("java.io.tmpdir") + if (System.getenv().containsKey("SCALA_TEST_ON_GPU") && + System.getenv("SCALA_TEST_ON_GPU").toInt == 1) { + val ctx = Context.gpu() + NeuralStyle.runTraining("vgg19", tempDirPath + "/NS/IMG_4343.jpg", + tempDirPath + "/NS/starry_night.jpg", + ctx, tempDirPath + "/NS/vgg19.params", tempDirPath + "/NS", + 1f, 20f, 0.01f, 1, 10f, 60, 600, 50, 0.0005f) + } else { + logger.info("GPU test only, skip CPU") + } + } }
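For reference, a minimal Scala sketch of how the refactored entry points introduced in this patch can be invoked programmatically, mirroring the calls made by the new CI tests; the `/tmp/NS` paths and the `NeuralStyleDemo` object are placeholders standing in for the files listed in the README's Setup section, not values taken from the patch itself:

```scala
import org.apache.mxnet.Context
import org.apache.mxnetexamples.neuralstyle.NeuralStyle
import org.apache.mxnetexamples.neuralstyle.end2end.{BoostInference, BoostTrain}

object NeuralStyleDemo {
  def main(args: Array[String]): Unit = {
    val base = "/tmp/NS"      // assumed download location (see README Setup)
    val ctx = Context.gpu(0)  // use Context.cpu() when no GPU is available

    // Boost inference with the pre-trained generators from model.zip (unzipped to $base/model),
    // using the same arguments as the "Test Boost Inference" CI case.
    BoostInference.runInference(s"$base/model", base, 2, s"$base/IMG_4343.jpg", ctx)

    // End-to-end boost training over a directory of content images, using the VGG-19 weights.
    BoostTrain.runTraining(s"$base/images", s"$base/vgg19.params", ctx,
      s"$base/starry_night.jpg", base)

    // Classic neural-style transfer; the numeric arguments follow the CI test:
    // styleWeight, contentWeight, tvWeight, gaussianRadius, lr, maxNumEpochs,
    // maxLongEdge, saveEpochs, stopEps.
    NeuralStyle.runTraining("vgg19", s"$base/IMG_4343.jpg", s"$base/starry_night.jpg",
      ctx, s"$base/vgg19.params", base,
      1f, 20f, 0.01f, 1, 10f, 60, 600, 50, 0.0005f)
  }
}
```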