1
0
Fork 0
mirror of https://github.com/tensorflow/haskell.git synced 2024-11-26 21:09:44 +01:00

Add gradient for sigmoid (#245)

This commit is contained in:
rschlotterbeck 2019-07-08 02:18:02 +02:00 committed by fkm3
parent 1fbd5d41dd
commit c811037cb9
3 changed files with 15 additions and 0 deletions

View file

@ -85,6 +85,8 @@ import TensorFlow.Ops
    , shape
    , softmaxCrossEntropyWithLogits
    , sum
    , sigmoid
    , sigmoidGrad
    , scalarize
    , vector
    , zerosLike
@ -481,6 +483,7 @@ opGrad "Neg" _ [_] [dz] = [Just $ negate $ expr dz]
opGrad "Relu" _ [toT -> x] [dz] = [Just $ reluGrad dz x]
opGrad "ReluGrad" _ [_, toT -> x ] [dz] = [Just $ reluGrad dz x, Just $ CoreOps.zerosLike x]
opGrad "Tanh" _ [toT -> x] [dz] = [Just $ tanhGrad (tanh x) dz]
opGrad "Sigmoid" _ [toT -> x] [dz] = [Just $ sigmoidGrad (sigmoid x) dz]
opGrad "Concat" _ _ix [dy]
    -- Concat concatenates input tensors
@ -947,6 +950,7 @@ numOutputs o =
    "ReluGrad" -> 1
    "Reshape" -> 1
    "Select" -> 1
"Sigmoid" -> 1
    "Size" -> 1
    "Slice" -> 1
    "SoftmaxCrossEntropyWithLogits" -> 2

View file

@ -123,6 +123,8 @@ module TensorFlow.Ops
    , scalar'
    , shape
    , shape'
, CoreOps.sigmoid
, CoreOps.sigmoidGrad
    , CoreOps.sign
    , CoreOps.sign'
    , CoreOps.size

View file

@ -368,6 +368,14 @@ testTanhGrad = testCase "testTanhGrad" $ do
        TF.gradients y [x] >>= TF.run
    V.fromList [1] @=? dx
testSigmoidGrad :: Test
testSigmoidGrad = testCase "testSigmoidGrad" $ do
    -- Differentiate sigmoid at x = 0; the analytic gradient there is
    -- sigmoid(0) * (1 - sigmoid(0)) = 0.5 * 0.5 = 0.25.
    [dx] <- TF.runSession $ do
        x <- TF.render $ TF.vector [0 :: Float]
        TF.gradients (TF.sigmoid x) [x] >>= TF.run
    V.fromList [0.25] @=? dx
testExpandDims :: Test
testExpandDims =
    testCase "testExpandDims" $ do
@ -681,6 +689,7 @@ main = defaultMain
        , testReluGrad
        , testReluGradGrad
        , testTanhGrad
, testSigmoidGrad
        , testExpandDims
        , testReshape
        , testPad