| Safe Haskell | None |
| --- | --- |
| Language | Haskell2010 |
Synopsis
- type Minimizer a = forall m. MonadBuild m => [Variable a] -> [Tensor Value a] -> m ControlNode
- minimizeWith :: (MonadBuild m, GradientCompatible a) => Minimizer a -> Tensor v a -> [Variable a] -> m ControlNode
- gradientDescent :: GradientCompatible a => a -> Minimizer a
- type OneOfAdamDataTypes t = OneOf '[Complex Double, Complex Float, Int16, Int32, Int64, Int8, Word16, Word32, Word64, Word8, Double, Float] t
- data AdamConfig t = AdamConfig {
- adamLearningRate :: t
- adamBeta1 :: t
- adamBeta2 :: t
- adamEpsilon :: t
- }
- adam :: (OneOfAdamDataTypes t, Fractional t) => Minimizer t
- adam' :: OneOfAdamDataTypes t => AdamConfig t -> Minimizer t
Documentation
type Minimizer a = forall m. MonadBuild m => [Variable a] -> [Tensor Value a] -> m ControlNode Source #

Functions that minimize a loss w.r.t. a set of Variables. Generally performs only one step of an iterative algorithm.

Minimizers are defined as a function of the gradients instead of the loss so that users can apply transformations to the gradients.
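Because a Minimizer just maps parameters and their gradients to an update ControlNode, you can write your own. Below is a minimal sketch (illustrative, not the library's implementation) of a fixed-rate gradient-descent Minimizer; it assumes assignAdd from TensorFlow.Variable and group from TensorFlow.ControlFlow:

```haskell
{-# LANGUAGE RankNTypes #-}

import Control.Monad (zipWithM)
import qualified TensorFlow.ControlFlow as TF (group)
import TensorFlow.Gradient (GradientCompatible)
import TensorFlow.Minimize (Minimizer)
import qualified TensorFlow.Ops as TF
import qualified TensorFlow.Variable as TF

-- A toy Minimizer: for each (parameter, gradient) pair, add
-- (-0.01 * gradient) to the parameter, then group all updates
-- into a single ControlNode.
sgd :: (GradientCompatible a, Fractional a) => Minimizer a
sgd params grads = do
    let applyGrad param grad =
            TF.assignAdd param (TF.scalar (-0.01) `TF.mul` grad)
    TF.group =<< zipWithM applyGrad params grads
```

gradientDescent below generalizes this pattern by taking the learning rate as an argument.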
minimizeWith Source #

:: (MonadBuild m, GradientCompatible a)
=> Minimizer a
-> Tensor v a       -- ^ Loss.
-> [Variable a]     -- ^ Parameters of the loss function.
-> m ControlNode

Computes the gradients of the loss with respect to the parameters and hands them to the given Minimizer, returning a ControlNode that performs one update step.
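For a usage sketch, here is an end-to-end linear regression in the style of the library's README example (the data generation and hyperparameters are illustrative):

```haskell
import Control.Monad (replicateM, replicateM_)
import System.Random (randomIO)

import qualified TensorFlow.Core as TF
import qualified TensorFlow.GenOps.Core as TF (square)
import qualified TensorFlow.Minimize as TF
import qualified TensorFlow.Ops as TF hiding (initializedVariable)
import qualified TensorFlow.Variable as TF

-- Fit y = w*x + b to points generated from y = 3x + 8.
fit :: [Float] -> [Float] -> IO (Float, Float)
fit xData yData = TF.runSession $ do
    let x = TF.vector xData
        y = TF.vector yData
    -- Initialized variables record an initializedValue, which some
    -- minimizers (e.g. adam) require.
    w <- TF.initializedVariable 0
    b <- TF.initializedVariable 0
    let yHat = (x `TF.mul` TF.readValue w) `TF.add` TF.readValue b
        loss = TF.square (yHat `TF.sub` y)
    -- minimizeWith differentiates the loss w.r.t. [w, b] and feeds
    -- the gradients to the Minimizer.
    trainStep <- TF.minimizeWith (TF.gradientDescent 0.001) loss [w, b]
    replicateM_ 1000 (TF.run_ trainStep)
    (TF.Scalar w', TF.Scalar b') <- TF.run (TF.readValue w, TF.readValue b)
    return (w', b')

main :: IO ()
main = do
    xs <- replicateM 100 randomIO
    (w, b) <- fit xs [x * 3 + 8 | x <- xs]
    print (w, b)  -- should be close to (3.0, 8.0)
```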
gradientDescent Source #

:: GradientCompatible a
=> a                -- ^ Learning rate.
-> Minimizer a
Perform one step of the gradient descent algorithm.
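In the sketch above, `TF.minimizeWith (TF.gradientDescent 0.001) loss [w, b]` builds the op for one such step; each `run_` of it applies the update `v := v - 0.001 * gradient` to every parameter.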
type OneOfAdamDataTypes t = OneOf '[Complex Double, Complex Float, Int16, Int32, Int64, Int8, Word16, Word32, Word64, Word8, Double, Float] t Source #
data AdamConfig t Source #
Constructors

AdamConfig

- adamLearningRate :: t
- adamBeta1 :: t
- adamBeta2 :: t
- adamEpsilon :: t
Instances
Fractional t => Default (AdamConfig t) Source #

Defined in TensorFlow.Minimize

def :: AdamConfig t
adam :: (OneOfAdamDataTypes t, Fractional t) => Minimizer t Source #
Perform one step of the Adam algorithm.
See https://arxiv.org/abs/1412.6980.
NOTE: Currently requires all Variables to have an initializedValue.
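A minimal usage sketch (the target value and step count are arbitrary): minimize (x - 5)^2 with adam. initializedVariable supplies the initializedValue that adam currently requires:

```haskell
import Control.Monad (replicateM_)

import qualified TensorFlow.Core as TF
import qualified TensorFlow.GenOps.Core as TF (square)
import qualified TensorFlow.Minimize as TF
import qualified TensorFlow.Ops as TF hiding (initializedVariable)
import qualified TensorFlow.Variable as TF

main :: IO ()
main = do
    x' <- TF.runSession $ do
        x <- TF.initializedVariable (TF.scalar (0 :: Float))
        let loss = TF.square (TF.readValue x `TF.sub` TF.scalar 5)
        step <- TF.minimizeWith TF.adam loss [x]
        replicateM_ 1000 (TF.run_ step)
        TF.Scalar v <- TF.run (TF.readValue x)
        return v
    print x'  -- should approach 5.0
```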
adam' :: OneOfAdamDataTypes t => AdamConfig t -> Minimizer t Source #
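adam' is adam with the hyperparameters supplied explicitly. Thanks to the Default instance above, a convenient pattern (a sketch; the 0.01 rate is arbitrary, and def is assumed to come from the data-default package) is to override individual fields:

```haskell
{-# LANGUAGE RankNTypes #-}

import Data.Default (def)
import qualified TensorFlow.Minimize as TF

-- Adam with a larger learning rate; adamBeta1, adamBeta2 and
-- adamEpsilon keep their default values.
myAdam :: TF.Minimizer Float
myAdam = TF.adam' def { TF.adamLearningRate = 0.01 }
```

Use it anywhere a Minimizer is expected, e.g. `TF.minimizeWith myAdam loss params`.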