Gradient for AddN
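
Registers a gradient for the "AddN" op, under which each input of an
n-ary sum receives the upstream gradient unchanged. Also records that
"AddN" has one output and adds a regression test, testAddNGradient.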

Christian Berentsen, 2017-06-16 13:42:33 +02:00 (committed by fkm3)
parent 41f4c8a235
commit ea30577264
2 changed files with 16 additions and 0 deletions


@@ -511,6 +511,11 @@ opGrad "Add" _ [toT -> x, toT -> y] [dz] =
     sy = shape (y :: Tensor Build a)
     (rx, ry) = broadcastGradientArgs sx sy
 
+-- Copies the gradients to all inputs
+-- Not broadcasting
+opGrad "AddN" _ inputs [dz] =
+    map ((const . Just . expr) dz) inputs
+
 opGrad "Sub" u v w =
     [Just x, Just (-y)]
   where
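
Since AddN only sums its inputs elementwise, d(x1 + ... + xn)/dxj = 1
for every j, so each input receives the upstream gradient dz verbatim,
with no broadcasting to undo. A minimal sketch of that rule on plain
Haskell lists standing in for tensors (addNGrad here is an illustrative
stand-in, not part of the library):

-- Every input of an n-ary sum gets the upstream gradient unchanged,
-- mirroring `map ((const . Just . expr) dz) inputs` above.
addNGrad :: [[Double]] -> [Double] -> [[Double]]
addNGrad inputs dz = map (const dz) inputs

main :: IO ()
main = print (addNGrad [[1, 2, 0], [1, 2, 0]] [1, 1, 1])
-- [[1.0,1.0,1.0],[1.0,1.0,1.0]]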
@@ -711,6 +716,7 @@ numOutputs o =
     case o ^. op of
         "Abs" -> 1
         "Add" -> 1
+        "AddN" -> 1
         "Cast" -> 1
         "Const" -> 1
         "Conv2D" -> 1


@@ -156,6 +156,15 @@ testDiamond = testCase "testDiamond" $ do
     (4 :: Float) @=? TF.unScalar dx
 
+testAddNGradient :: Test
+testAddNGradient = testCase "testAddNGradient" $ do
+    [dx] <- TF.runSession $ do
+        x <- TF.render $ TF.vector [1, 2, 0 :: Float]
+        let y = TF.addN [x, x]
+        TF.gradients y [x] >>= TF.run
+    V.fromList [2, 2, 2 :: Float] @=? dx
+
 testMaxGradient :: Test
 testMaxGradient = testCase "testMaxGradient" $ do
     [dx] <- TF.runSession $ do
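
The expected value in testAddNGradient above follows because
y = addN [x, x] consumes x twice, and gradients flowing back to the
same tensor from several uses are summed: with an upstream gradient of
ones, each element of x receives 1 + 1 = 2, hence [2, 2, 2]. A plain
Haskell sketch of that accumulation (accumulate is illustrative, not
library code):

-- Per-occurrence gradients for a reused tensor are added elementwise.
accumulate :: [[Float]] -> [Float]
accumulate = foldr1 (zipWith (+))

main :: IO ()
main = print (accumulate [[1, 1, 1], [1, 1, 1]])  -- [2.0,2.0,2.0]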
@@ -298,6 +307,7 @@ main = defaultMain
     , testCreateGraphStateful
     , testCreateGraphNameScopes
     , testDiamond
+    , testAddNGradient
     , testMaxGradient
     , testReluGrad
     , testReluGradGrad