Legend:
Page
Library
Module
Module type
Parameter
Class
Class type
Source
Source file owl_algodiff_generic_sig.ml
# 1 "src/base/algodiff/owl_algodiff_generic_sig.ml"
(*
 * OWL - OCaml Scientific Computing
 * Copyright (c) 2016-2022 Liang Wang <liang@ocaml.xyz>
 *)

(** Interface of the generic algorithmic-differentiation module: constructors
    for forward/reverse AD values plus the standard differential operators
    (derivative, gradient, jacobian, hessian, laplacian and their primed
    variants that also return [f x]). *)
module type Sig = sig
  include Owl_algodiff_core_sig.Sig

  val make_forward : t -> t -> int -> t
  (** [make_forward p t i] constructs a forward algodiff data type DF,
      consisting of primal [p], tangent [t], and tag [i]. *)

  val make_reverse : t -> int -> t
  (** [make_reverse p i] constructs a reverse algodiff data type DR,
      consisting of primal, adjoint, op, fanout, tag, and tracker. *)

  val reverse_prop : t -> t -> unit
  (** [reverse_prop f x] performs reverse propagation for function [f] using
      the output value [x]. *)

  val diff : (t -> t) -> t -> t
  (** [diff f x] returns the exact derivative of a function
      [f : scalar -> scalar] at point [x]. Simply calling [diff f] will return
      its derivative function [g] of the same type, i.e.
      [g : scalar -> scalar].

      Keep calling this function will give you higher-order derivatives of
      [f], i.e. [f |> diff |> diff |> diff |> ...] *)

  val diff' : (t -> t) -> t -> t * t
  (** similar to [diff], but return [(f x, diff f x)]. *)

  val grad : (t -> t) -> t -> t
  (** gradient of [f] : (vector -> scalar) at [x], reverse ad. *)

  val grad' : (t -> t) -> t -> t * t
  (** similar to [grad], but return [(f x, grad f x)]. *)

  val jacobian : (t -> t) -> t -> t
  (** jacobian of [f] : (vector -> vector) at [x], both [x] and [y] are row
      vectors. *)

  val jacobian' : (t -> t) -> t -> t * t
  (** similar to [jacobian], but return [(f x, jacobian f x)] *)

  val jacobianv : (t -> t) -> t -> t -> t
  (** jacobian vector product of [f] : (vector -> vector) at [x] along [v],
      forward ad. Namely, it calculates [(jacobian x) v] *)

  val jacobianv' : (t -> t) -> t -> t -> t * t
  (** similar to [jacobianv], but return [(f x, jacobianv f x v)] *)

  val jacobianTv : (t -> t) -> t -> t -> t
  (** transposed jacobian vector product of [f : (vector -> vector)] at [x]
      along [v], backward ad. Namely, it calculates
      [transpose ((jacobianv f x v))]. *)

  val jacobianTv' : (t -> t) -> t -> t -> t * t
  (** similar to [jacobianTv], but return
      [(f x, transpose (jacobianv f x v))] *)

  val hessian : (t -> t) -> t -> t
  (** hessian of [f] : (scalar -> scalar) at [x]. *)

  val hessian' : (t -> t) -> t -> t * t
  (** similar to [hessian], but return [(f x, hessian f x)] *)

  val hessianv : (t -> t) -> t -> t -> t
  (** hessian vector product of [f] : (scalar -> scalar) at [x] along [v].
      Namely, it calculates [(hessian x) v]. *)

  val hessianv' : (t -> t) -> t -> t -> t * t
  (** similar to [hessianv], but return [(f x, hessianv f x v)]. *)

  val laplacian : (t -> t) -> t -> t
  (** laplacian of [f : (scalar -> scalar)] at [x]. *)

  val laplacian' : (t -> t) -> t -> t * t
  (** similar to [laplacian], but return [(f x, laplacian f x)]. *)

  val gradhessian : (t -> t) -> t -> t * t
  (** return [(grad f x, hessian f x)], [f : (scalar -> scalar)] *)

  val gradhessian' : (t -> t) -> t -> t * t * t
  (** return [(f x, grad f x, hessian f x)] *)

  val gradhessianv : (t -> t) -> t -> t -> t * t
  (** return [(grad f x v, hessian f x v)] *)

  val gradhessianv' : (t -> t) -> t -> t -> t * t * t
  (** return [(f x, grad f x v, hessian f x v)] *)

  (* Operations *)

  include
    Owl_algodiff_ops_sig.Sig
      with type t := t
       and type elt := A.elt
       and type arr := A.arr
       and type op := op

  (** {5 Helper functions} *)

  include Owl_algodiff_graph_convert_sig.Sig with type t := t
end