16 changes: 4 additions & 12 deletions js/neuralnetwork_builder.js
@@ -1,3 +1,5 @@
+import * as opt from '../../lib/model/nns/optimizer.js'
+
const layerTypes = {
abs: {},
clip: { min: 0, max: 1 },
@@ -192,22 +194,12 @@ export default class NeuralNetworkBuilder {
this._opt = r.append('select').attr('name', 'optimizer')
this._opt
.selectAll('option')
-.data([
-'sgd',
-'adam',
-'momentum',
-'adagrad',
-'rmsprop',
-'adadelta',
-'rmspropgraves',
-'smorms3',
-'adamax',
-'nadam',
-])
+.data(Object.keys(opt))
.enter()
.append('option')
.property('value', d => d)
.text(d => d)
+this._opt.property('value', 'adam')
}
}
}
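Note on the change above: the dropdown is now populated from the optimizer module's named exports instead of a hand-maintained list. A minimal sketch of what Object.keys(opt) yields, given the aliased re-exports in lib/model/nns/optimizer.js further down; keep in mind that the keys of a module namespace object come back sorted, so the menu order switches from the old curated order to alphabetical:

import * as opt from '../../lib/model/nns/optimizer.js'

// One key per named export; every optimizer re-exported from optimizer.js
// shows up in the <select> automatically, with no list to keep in sync.
console.log(Object.keys(opt))
// ['adabelief', 'adabound', 'adadelta', 'adagrad', 'adam', 'adamax',
//  'amsbound', 'amsgrad', 'momentum', 'nadam', 'rmsprop', 'rmspropgraves',
//  'santae', 'santasss', 'sgd', 'smorms3']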
15 changes: 3 additions & 12 deletions js/view/ladder_network.js
@@ -1,5 +1,6 @@
import Controller from '../controller.js'
import { BaseWorker } from '../utils.js'
+import * as opt from '../../lib/model/nns/optimizer.js'

class LadderNetworkWorker extends BaseWorker {
constructor() {
@@ -85,18 +86,8 @@ export default function (platform) {

const optimizer = controller.select({
label: ' Optimizer ',
-values: [
-'sgd',
-'adam',
-'momentum',
-'adagrad',
-'rmsprop',
-'adadelta',
-'rmspropgraves',
-'smorms3',
-'adamax',
-'nadam',
-],
+values: Object.keys(opt),
value: 'adam',
})
const slbConf = controller.stepLoopButtons().init(done => {
if (platform.datas.length === 0) {
4 changes: 2 additions & 2 deletions lib/model/mlp.js
@@ -1,5 +1,5 @@
import Matrix from '../util/matrix.js'
-import { AdamOptimizer } from './nns/optimizer.js'
+import { adam } from './nns/optimizer.js'

/**
* @ignore
@@ -31,7 +31,7 @@ class MLP {
this._w[i] = Matrix.randn(layer_sizes[i], layer_sizes[i + 1], 0, 0.1)
this._b[i] = Matrix.zeros(1, layer_sizes[i + 1])
}
-this._optimizer = new AdamOptimizer()
+this._optimizer = new adam()
this._optimizer_mng = this._optimizer.manager()
}

24 changes: 4 additions & 20 deletions lib/model/neuralnetwork.js
@@ -84,31 +84,15 @@ export default class NeuralNetwork {

/**
* @param {ComputationalGraph} graph Graph of a network
-* @param {'sgd' | 'adam' | 'momentum' | 'adagrad' | 'rmsprop' | 'adadelta' | 'rmspropgraves' | 'smorms3' | 'adamax' | 'nadam'} [optimizer] Optimizer of the network
+* @param {'sgd' | 'adam' | 'momentum' | 'adagrad' | 'rmsprop' | 'adadelta' | 'rmspropgraves' | 'smorms3' | 'adamax' | 'nadam' | 'santae' | 'santasss' | 'amsgrad' | 'adabound' | 'amsbound' | 'adabelief'} [optimizer] Optimizer of the network
*/
constructor(graph, optimizer = 'sgd') {
this._graph = graph
this._optimizer = optimizer
-if (optimizer === 'adam') {
-this._opt = new opt.AdamOptimizer()
-} else if (optimizer === 'momentum') {
-this._opt = new opt.MomentumOptimizer()
-} else if (optimizer === 'adagrad') {
-this._opt = new opt.AdaGradOptimizer()
-} else if (optimizer === 'rmsprop') {
-this._opt = new opt.RMSPropOptimizer()
-} else if (optimizer === 'adadelta') {
-this._opt = new opt.AdaDeltaOptimizer()
-} else if (optimizer === 'rmspropgraves') {
-this._opt = new opt.RMSPropGravesOptimizer()
-} else if (optimizer === 'smorms3') {
-this._opt = new opt.SMORMS3Optimizer()
-} else if (optimizer === 'adamax') {
-this._opt = new opt.AdaMaxOptimizer()
-} else if (optimizer === 'nadam') {
-this._opt = new opt.NadamOptimizer()
+if (Object.keys(opt).includes(optimizer)) {
+this._opt = new opt[optimizer]()
} else {
-this._opt = new opt.SGDOptimizer()
+this._opt = new opt.sgd()
}
this._opt_managers = []
for (let i = 0; i < this._graph.size; i++) {
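The nine-branch chain above collapses into a key lookup with the same sgd fallback. A behavioral sketch (makeOptimizer is a hypothetical helper for illustration, not part of this PR):

import * as opt from './nns/optimizer.js'

// Any exported key instantiates its class; unknown names fall back to sgd,
// mirroring the else branch in the constructor.
const makeOptimizer = name =>
	Object.keys(opt).includes(name) ? new opt[name]() : new opt.sgd()

makeOptimizer('adabelief') // AdaBeliefOptimizer instance
makeOptimizer('unknown') // SGDOptimizer instance (fallback)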
26 changes: 16 additions & 10 deletions lib/model/nns/optimizer.js
@@ -1,10 +1,16 @@
-export { SGDOptimizer } from './optimizer/sgd.js'
-export { MomentumOptimizer } from './optimizer/momentum.js'
-export { AdaGradOptimizer } from './optimizer/adagrad.js'
-export { RMSPropOptimizer } from './optimizer/rmsprop.js'
-export { AdaDeltaOptimizer } from './optimizer/adadelta.js'
-export { AdamOptimizer } from './optimizer/adam.js'
-export { RMSPropGravesOptimizer } from './optimizer/rmspropgraves.js'
-export { SMORMS3Optimizer } from './optimizer/smorms3.js'
-export { AdaMaxOptimizer } from './optimizer/adamax.js'
-export { NadamOptimizer } from './optimizer/nadam.js'
+export { SGDOptimizer as sgd } from './optimizer/sgd.js'
+export { MomentumOptimizer as momentum } from './optimizer/momentum.js'
+export { AdaGradOptimizer as adagrad } from './optimizer/adagrad.js'
+export { RMSPropOptimizer as rmsprop } from './optimizer/rmsprop.js'
+export { AdaDeltaOptimizer as adadelta } from './optimizer/adadelta.js'
+export { AdamOptimizer as adam } from './optimizer/adam.js'
+export { RMSPropGravesOptimizer as rmspropgraves } from './optimizer/rmspropgraves.js'
+export { SMORMS3Optimizer as smorms3 } from './optimizer/smorms3.js'
+export { AdaMaxOptimizer as adamax } from './optimizer/adamax.js'
+export { NadamOptimizer as nadam } from './optimizer/nadam.js'
+export { SantaEOptimizer as santae } from './optimizer/santae.js'
+export { SantaSSSOptimizer as santasss } from './optimizer/santasss.js'
+export { AMSGradOptimizer as amsgrad } from './optimizer/amsgrad.js'
+export { AdaBoundOptimizer as adabound } from './optimizer/adabound.js'
+export { AMSBoundOptimizer as amsbound } from './optimizer/amsbound.js'
+export { AdaBeliefOptimizer as adabelief } from './optimizer/adabelief.js'
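Since each class is re-exported only under its lowercase alias, the alias is now the sole public binding, which is why mlp.js above switches from AdamOptimizer to adam. A usage sketch under that assumption:

// Before: import { AdamOptimizer } from './optimizer.js'
// After: the lowercase alias is the only exported name.
import { adam, adabelief } from './optimizer.js'

const o = new adabelief()
o.learningRate = 0.01 // setter defined in the new optimizer classes below
const manager = o.manager() // per-parameter state (m, v, t) lives here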
44 changes: 44 additions & 0 deletions lib/model/nns/optimizer/adabelief.js
@@ -0,0 +1,44 @@
import Matrix from '../../../util/matrix.js'

export class AdaBeliefOptimizer {
constructor(lr = 0.001, beta1 = 0.9, beta2 = 0.999) {
this._learningrate = lr
this._beta1 = beta1
this._beta2 = beta2
}

set learningRate(value) {
this._learningrate = value
}

manager() {
const this_ = this
return {
get lr() {
return this_._learningrate
},
params: {},
delta(key, value) {
const valueIsNumber = typeof value === 'number'
if (valueIsNumber) {
value = new Matrix(1, 1, value)
}
if (!this.params[key]) {
const z = value.copy()
z.fill(0)
this.params[key] = { m: z.copy(), v: z, t: 1 }
}
this.params[key].m.broadcastOperate(value, (a, b) => a * this_._beta1 + b * (1 - this_._beta1))
const mo = this.params[key].m.copy()
mo.broadcastOperate(value, (a, b) => b - a)
this.params[key].v.broadcastOperate(mo, (a, b) => a * this_._beta2 + (1 - this_._beta2) * b * b)
const nv = 1 - this_._beta1 ** this.params[key].t
const ns = 1 - this_._beta2 ** this.params[key].t
const ret = this.params[key].m.copy()
ret.broadcastOperate(this.params[key].v, (a, b) => (a / nv) * (this.lr / Math.sqrt(b / ns + 1.0e-12)))
this.params[key].t++
return valueIsNumber ? ret.toScaler() : ret
},
}
}
}
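A usage sketch for the manager interface above: delta() accepts either a plain number, which it wraps in a 1x1 Matrix and unwraps via toScaler(), or a Matrix gradient, and returns the update in the same form. This assumes the caller subtracts the returned step, as in plain gradient descent; values are illustrative.

// Minimize f(w) = w^2 with AdaBelief; the gradient is 2w.
const mng = new AdaBeliefOptimizer(0.001).manager()
let w = 1
for (let t = 0; t < 1000; t++) {
	w -= mng.delta('w', 2 * w) // scalar in, scalar step out
}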
48 changes: 48 additions & 0 deletions lib/model/nns/optimizer/adabound.js
@@ -0,0 +1,48 @@
import Matrix from '../../../util/matrix.js'

export class AdaBoundOptimizer {
constructor(lr = 0.001, alpha = 0.003, beta1 = 0.9, beta2 = 0.999) {
this._learningrate = lr
this._alpha = alpha
this._beta1 = beta1
this._beta2 = beta2

this._eta_lbound = t => this._learningrate * (1 - 1 / ((1 - beta2) * t + 1))
this._eta_ubound = t => this._learningrate * (1 + 1 / ((1 - beta2) * t + 1))
}

set learningRate(value) {
this._learningrate = value
}

manager() {
const this_ = this
return {
get lr() {
return this_._learningrate
},
params: {},
delta(key, value) {
const valueIsNumber = typeof value === 'number'
if (valueIsNumber) {
value = new Matrix(1, 1, value)
}
if (!this.params[key]) {
const z = value.copy()
z.fill(0)
this.params[key] = { m: z.copy(), v: z, t: 1 }
}
this.params[key].m.broadcastOperate(value, (a, b) => a * this_._beta1 + b * (1 - this_._beta1))
this.params[key].v.broadcastOperate(value, (a, b) => a * this_._beta2 + (1 - this_._beta2) * b * b)
const eta_lb = this_._eta_lbound(this.params[key].t)
const eta_ub = this_._eta_ubound(this.params[key].t)
const eta = this.params[key].v.copy()
eta.map(v => Math.min(eta_ub, Math.max(eta_lb, this_._alpha / Math.sqrt(v))))
const ret = this.params[key].m.copy()
ret.broadcastOperate(eta, (a, b) => (a * b) / Math.sqrt(this.params[key].t))
this.params[key].t++
return valueIsNumber ? ret.toScaler() : ret
},
}
}
}
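The two eta bounds above pinch toward the base learning rate as t grows: the clipping window is wide at first (Adam-like per-element rates) and collapses to lr in the limit (SGD-like). A quick numeric sketch with the defaults lr = 0.001, beta2 = 0.999:

const lr = 0.001
const beta2 = 0.999
const lb = t => lr * (1 - 1 / ((1 - beta2) * t + 1))
const ub = t => lr * (1 + 1 / ((1 - beta2) * t + 1))
console.log(lb(1), ub(1)) // ~9.99e-7, ~2.00e-3 -> wide window early
console.log(lb(1e6), ub(1e6)) // ~9.99e-4, ~1.001e-3 -> pinched to lr late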
49 changes: 49 additions & 0 deletions lib/model/nns/optimizer/amsbound.js
@@ -0,0 +1,49 @@
import Matrix from '../../../util/matrix.js'

export class AMSBoundOptimizer {
constructor(lr = 0.001, alpha = 0.003, beta1 = 0.9, beta2 = 0.999) {
this._learningrate = lr
this._alpha = alpha
this._beta1 = beta1
this._beta2 = beta2

this._eta_lbound = t => this._learningrate * (1 - 1 / ((1 - beta2) * t + 1))
this._eta_ubound = t => this._learningrate * (1 + 1 / ((1 - beta2) * t + 1))
}

set learningRate(value) {
this._learningrate = value
}

manager() {
const this_ = this
return {
get lr() {
return this_._learningrate
},
params: {},
delta(key, value) {
const valueIsNumber = typeof value === 'number'
if (valueIsNumber) {
value = new Matrix(1, 1, value)
}
if (!this.params[key]) {
const z = value.copy()
z.fill(0)
this.params[key] = { m: z.copy(), v: z.copy(), vh: z, t: 1 }
}
this.params[key].m.broadcastOperate(value, (a, b) => a * this_._beta1 + b * (1 - this_._beta1))
this.params[key].v.broadcastOperate(value, (a, b) => a * this_._beta2 + (1 - this_._beta2) * b * b)
this.params[key].vh.broadcastOperate(this.params[key].v, (a, b) => Math.max(a, b))
const eta_lb = this_._eta_lbound(this.params[key].t)
const eta_ub = this_._eta_ubound(this.params[key].t)
const eta = this.params[key].vh.copy()
eta.map(v => Math.min(eta_ub, Math.max(eta_lb, this_._alpha / Math.sqrt(v))))
const ret = this.params[key].m.copy()
ret.broadcastOperate(eta, (a, b) => (a * b) / Math.sqrt(this.params[key].t))
this.params[key].t++
return valueIsNumber ? ret.toScaler() : ret
},
}
}
}
43 changes: 43 additions & 0 deletions lib/model/nns/optimizer/amsgrad.js
@@ -0,0 +1,43 @@
import Matrix from '../../../util/matrix.js'

export class AMSGradOptimizer {
constructor(lr = 0.001, beta1 = 0.9, beta2 = 0.999) {
this._learningrate = lr
this._beta1 = beta1
this._beta2 = beta2
this._a = t => this._learningrate / Math.sqrt(t)
}

set learningRate(value) {
this._learningrate = value
}

manager() {
const this_ = this
return {
get lr() {
return this_._learningrate
},
params: {},
delta(key, value) {
const valueIsNumber = typeof value === 'number'
if (valueIsNumber) {
value = new Matrix(1, 1, value)
}
if (!this.params[key]) {
const z = value.copy()
z.fill(0)
this.params[key] = { m: z.copy(), v: z.copy(), vh: z, t: 1 }
}
this.params[key].m.broadcastOperate(value, (a, b) => a * this_._beta1 + b * (1 - this_._beta1))
this.params[key].v.broadcastOperate(value, (a, b) => a * this_._beta2 + b ** 2 * (1 - this_._beta2))
this.params[key].vh.broadcastOperate(this.params[key].v, (a, b) => Math.max(a, b))
const ret = this.params[key].m.copy()
const lr = this_._a(this.params[key].t)
ret.broadcastOperate(this.params[key].vh, (a, b) => (lr * a) / Math.sqrt(b + 1.0e-12))
this.params[key].t++
return valueIsNumber ? ret.toScaler() : ret
},
}
}
}
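All four new optimizers expose the same constructor/manager()/delta() contract as the existing ones, so they are interchangeable behind the dynamic lookup. A sketch (import path illustrative):

import * as opt from '../optimizer.js'

// Each manager keeps its own per-key state, so fresh instances start at t = 1.
for (const name of ['amsgrad', 'adabound', 'amsbound', 'adabelief']) {
	const mng = new opt[name]().manager()
	console.log(name, mng.delta('w', 0.5)) // first update step for a scalar gradient
}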