From 915015018ca170d47b62757ab9ad21913f571b3c Mon Sep 17 00:00:00 2001
From: Rik
Date: Wed, 14 Nov 2018 18:08:05 +0100
Subject: [PATCH] Added support for tanh activation function (#223)

---
 tensorflow-ops/src/TensorFlow/Gradient.hs | 6 +++++-
 tensorflow-ops/src/TensorFlow/Ops.hs      | 2 ++
 tensorflow-ops/tests/GradientTest.hs      | 9 +++++++++
 3 files changed, 16 insertions(+), 1 deletion(-)

diff --git a/tensorflow-ops/src/TensorFlow/Gradient.hs b/tensorflow-ops/src/TensorFlow/Gradient.hs
index dc046fc..34a9dea 100644
--- a/tensorflow-ops/src/TensorFlow/Gradient.hs
+++ b/tensorflow-ops/src/TensorFlow/Gradient.hs
@@ -45,7 +45,7 @@ import Lens.Family2 (Lens', view, (&), (^.), (.~), (%~))
 import Lens.Family2.State.Strict (uses)
 import Lens.Family2.Stock (at, intAt)
 import Lens.Family2.Unchecked (lens, iso)
-import Prelude hiding (sum)
+import Prelude hiding (sum, tanh)
 import Text.Printf (printf)
 import qualified Data.Graph.Inductive.Basic as FGL
 import qualified Data.Graph.Inductive.Graph as FGL
@@ -76,6 +76,8 @@ import TensorFlow.Ops
     , matMul'
     , reducedShape
     , reluGrad
+    , tanh
+    , tanhGrad
     , reshape
     , scalar
     , shape
@@ -459,6 +461,7 @@ opGrad "Abs" _ [toT -> x] [dz] = [Just $ expr dz * signum x]
 opGrad "Neg" _ [_] [dz] = [Just $ negate $ expr dz]
 opGrad "Relu" _ [toT -> x] [dz] = [Just $ reluGrad dz x]
 opGrad "ReluGrad" _ [_, toT -> x ] [dz] = [Just $ reluGrad dz x, Just $ CoreOps.zerosLike x]
+opGrad "Tanh" _ [toT -> x] [dz] = [Just $ tanhGrad (tanh x) dz]
 
 opGrad "Concat" _ _ix [dy]
     -- Concat concatenates input tensors
@@ -833,6 +836,7 @@ numOutputs o =
         "SparseSegmentSum" -> 1
         "Sub" -> 1
         "Sum" -> 1
+        "Tanh" -> 1
         "Tile" -> 1
         "Transpose" -> 1
         "TruncatedNormal" -> 1
diff --git a/tensorflow-ops/src/TensorFlow/Ops.hs b/tensorflow-ops/src/TensorFlow/Ops.hs
index ca7b9b2..d8da97f 100644
--- a/tensorflow-ops/src/TensorFlow/Ops.hs
+++ b/tensorflow-ops/src/TensorFlow/Ops.hs
@@ -112,6 +112,8 @@ module TensorFlow.Ops
     , CoreOps.relu'
     , CoreOps.reluGrad
     , CoreOps.reluGrad'
+    , CoreOps.tanh
+    , CoreOps.tanhGrad
     , CoreOps.reshape
     , CoreOps.reshape'
     , restore
diff --git a/tensorflow-ops/tests/GradientTest.hs b/tensorflow-ops/tests/GradientTest.hs
index b5d20db..b01971b 100644
--- a/tensorflow-ops/tests/GradientTest.hs
+++ b/tensorflow-ops/tests/GradientTest.hs
@@ -282,6 +282,14 @@ testReluGradGrad = testCase "testReluGradGrad" $ do
         TF.gradients y' [x] >>= TF.run
     V.fromList [0] @=? dx
 
+testTanhGrad :: Test
+testTanhGrad = testCase "testTanhGrad" $ do
+    [dx] <- TF.runSession $ do
+        x <- TF.render $ TF.vector [0 :: Float]
+        let y = TF.tanh x
+        TF.gradients y [x] >>= TF.run
+    V.fromList [1] @=? dx
+
 testFillGrad :: Test
 testFillGrad = testCase "testFillGrad" $ do
     [dx] <- TF.runSession $ do
@@ -427,6 +435,7 @@ main = defaultMain
             , testMaximumGradGrad
             , testReluGrad
             , testReluGradGrad
+            , testTanhGrad
             , testFillGrad
             , testTileGrad
             , testTile2DGrad
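
Usage note (not part of the patch): the Tanh gradient registered above follows
d/dx tanh(x) = 1 - tanh(x)^2, which is why opGrad hands the forward result
`tanh x` (rather than x) to tanhGrad along with the upstream gradient dz.
Below is a minimal standalone sketch of exercising the newly exported op from
user code; it mirrors the test case added in GradientTest.hs, and the module
layout (TensorFlow.Core, TensorFlow.Gradient, TensorFlow.Ops, Data.Vector) is
an assumption, since the imports themselves are not shown in this diff.

import qualified Data.Vector as V
import qualified TensorFlow.Core as TF
import qualified TensorFlow.Gradient as TF
import qualified TensorFlow.Ops as TF

main :: IO ()
main = do
    -- At x = 0 the gradient of tanh is 1; for larger |x| it decays toward 0.
    [dx] <- TF.runSession $ do
        x <- TF.render $ TF.vector [0, 2 :: Float]
        let y = TF.tanh x
        TF.gradients y [x] >>= TF.run
    print (dx :: V.Vector Float)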