package kaun

Flax-inspired neural network library for OCaml



Source file activations.ml

open Rune
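
(* Most of the activations below are thin re-exports of Rune primitives;
   prelu and the gated linear units are small compositions of them. *)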

(* Standard Activations *)

let relu = relu
let relu6 = relu6
let sigmoid = sigmoid
let tanh = tanh
let softmax = softmax

(* Modern Activations *)

let gelu = gelu
let silu = silu
let swish = silu (* Alias for silu *)
let mish = mish

(* Parametric Activations *)

let leaky_relu = leaky_relu
let elu = elu
let selu = selu

let prelu alpha x =
  (* max(0, x) + alpha * min(0, x) *)
  let zero = zeros_like x in
  add (maximum zero x) (mul alpha (minimum zero x))
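
(* With a scalar alpha this reduces to leaky_relu with that negative slope;
   in practice alpha is usually a learned per-channel parameter, which is
   why it is taken as a tensor here (assuming Rune's elementwise mul
   broadcasts, as in NumPy). *)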

(* Gated Linear Units (GLUs) *)

let glu x gate =
  (* x * sigmoid(gate) *)
  mul x (sigmoid gate)

let swiglu x =
  (* Self-gated variant: x * silu(x). Note that the SwiGLU used in
     transformer feed-forward blocks applies silu to a separate gate
     projection rather than to x itself; see the two-input GLU helpers
     in this section for that form. *)
  mul x (silu x)

let geglu x gate =
  (* x * gelu(gate) *)
  mul x (gelu gate)

let reglu x gate =
  (* x * relu(gate) *)
  mul x (relu gate)
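
(* Sketch of typical gated feed-forward usage (proj_value and proj_gate are
   hypothetical linear layers, not part of this module):

     let ffn x = geglu (proj_value x) (proj_gate x)

   The gate projection is passed through gelu and multiplied elementwise
   into the value projection. *)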

(* Other Activations *)

let softplus = softplus
let softsign = softsign
let hard_sigmoid = hard_sigmoid
let hard_tanh = hard_tanh

let hard_swish x =
  (* hard_swish(x) = x * relu6(x + 3) / 6. Rune does not export this under
     the name hard_swish, but its hard_silu computes the same function, so
     this is a direct alias. *)
  hard_silu x
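
A minimal composition sketch using only functions defined in this file (the
input tensors are left abstract; Rune's tensor-creation API is not shown):

(* Smooth the values with gelu, then gate them with relu(gate). *)
let pointwise_block x gate = reglu (gelu x) gate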