13 changes: 12 additions & 1 deletion js/neuralnetwork_builder.js
@@ -192,7 +192,18 @@ export default class NeuralNetworkBuilder {
this._opt = r.append('select').attr('name', 'optimizer')
this._opt
.selectAll('option')
-.data(['sgd', 'adam', 'momentum', 'rmsprop'])
+.data([
+'sgd',
+'adam',
+'momentum',
+'adagrad',
+'rmsprop',
+'adadelta',
+'rmspropgraves',
+'smorms3',
+'adamax',
+'nadam',
+])
.enter()
.append('option')
.property('value', d => d)
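For context, the data join above renders one <option> per optimizer name. A minimal standalone sketch of the same pattern, assuming d3 is available as elsewhere in this repo; the selector matches the attr('name', 'optimizer') set above, while the final .text(d => d) line is an assumption about the truncated tail of the join:

const names = ['sgd', 'adam', 'momentum', 'adagrad', 'rmsprop', 'adadelta', 'rmspropgraves', 'smorms3', 'adamax', 'nadam']
d3.select('select[name=optimizer]')
    .selectAll('option')
    .data(names)
    .enter()
    .append('option')
    .property('value', d => d)
    .text(d => d)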
16 changes: 15 additions & 1 deletion js/view/ladder_network.js
@@ -83,7 +83,21 @@ export default function (platform) {
],
})

-const optimizer = controller.select({ label: ' Optimizer ', values: ['sgd', 'adam', 'momentum', 'rmsprop'] })
+const optimizer = controller.select({
+label: ' Optimizer ',
+values: [
+'sgd',
+'adam',
+'momentum',
+'adagrad',
+'rmsprop',
+'adadelta',
+'rmspropgraves',
+'smorms3',
+'adamax',
+'nadam',
+],
+})
const slbConf = controller.stepLoopButtons().init(done => {
if (platform.datas.length === 0) {
done()
26 changes: 19 additions & 7 deletions lib/model/neuralnetwork.js
@@ -6,7 +6,7 @@ export { default as Layer } from './nns/layer/base.js'
import ComputationalGraph from './nns/graph.js'
export { default as ComputationalGraph } from './nns/graph.js'

-import { SGDOptimizer, MomentumOptimizer, RMSPropOptimizer, AdamOptimizer } from './nns/optimizer.js'
+import * as opt from './nns/optimizer.js'

/**
* @ignore
@@ -35,7 +35,7 @@ export default class NeuralNetwork {
* Returns neuralnetwork.
* @param {LayerObject[]} layers Network layers
* @param {string} [loss] Loss name
-* @param {'sgd' | 'adam' | 'momentum' | 'rmsprop'} [optimizer] Optimizer of the network
+* @param {'sgd' | 'adam' | 'momentum' | 'adagrad' | 'rmsprop' | 'adadelta' | 'rmspropgraves' | 'smorms3' | 'adamax' | 'nadam'} [optimizer] Optimizer of the network
* @returns {NeuralNetwork} Created Neuralnetwork
*/
static fromObject(layers, loss, optimizer = 'sgd') {
@@ -84,19 +84,31 @@ export default class NeuralNetwork {

/**
* @param {ComputationalGraph} graph Graph of a network
-* @param {'sgd' | 'adam' | 'momentum' | 'rmsprop'} [optimizer] Optimizer of the network
+* @param {'sgd' | 'adam' | 'momentum' | 'adagrad' | 'rmsprop' | 'adadelta' | 'rmspropgraves' | 'smorms3' | 'adamax' | 'nadam'} [optimizer] Optimizer of the network
*/
constructor(graph, optimizer = 'sgd') {
this._graph = graph
this._optimizer = optimizer
if (optimizer === 'adam') {
-this._opt = new AdamOptimizer()
+this._opt = new opt.AdamOptimizer()
} else if (optimizer === 'momentum') {
-this._opt = new MomentumOptimizer()
+this._opt = new opt.MomentumOptimizer()
+} else if (optimizer === 'adagrad') {
+this._opt = new opt.AdaGradOptimizer()
} else if (optimizer === 'rmsprop') {
-this._opt = new RMSPropOptimizer()
+this._opt = new opt.RMSPropOptimizer()
+} else if (optimizer === 'adadelta') {
+this._opt = new opt.AdaDeltaOptimizer()
+} else if (optimizer === 'rmspropgraves') {
+this._opt = new opt.RMSPropGravesOptimizer()
+} else if (optimizer === 'smorms3') {
+this._opt = new opt.SMORMS3Optimizer()
+} else if (optimizer === 'adamax') {
+this._opt = new opt.AdaMaxOptimizer()
+} else if (optimizer === 'nadam') {
+this._opt = new opt.NadamOptimizer()
} else {
-this._opt = new SGDOptimizer()
+this._opt = new opt.SGDOptimizer()
}
this._opt_managers = []
for (let i = 0; i < this._graph.size; i++) {
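With the widened union, any of the ten names can be passed straight through from the UI selects above. A minimal sketch of the new constructor path; the layer objects and loss name here are illustrative assumptions, only the optimizer strings come from this diff:

import NeuralNetwork from './lib/model/neuralnetwork.js'

// 'nadam' and the other six new names now reach the branch table above.
const net = NeuralNetwork.fromObject(
    [{ type: 'input' }, { type: 'full', out_size: 3 }, { type: 'output' }],
    'mse',
    'nadam'
)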
148 changes: 10 additions & 138 deletions lib/model/nns/optimizer.js
@@ -1,138 +1,10 @@
-import Matrix from '../../util/matrix.js'
-
-export class SGDOptimizer {
-constructor(lr) {
-this._learningrate = lr
-}
-
-set learningRate(value) {
-this._learningrate = value
-}
-
-manager() {
-const this_ = this
-return {
-get lr() {
-return this_._learningrate
-},
-delta(key, value) {
-if (typeof value === 'number') {
-return value * this.lr
-}
-const v = value.copy()
-v.map(v => v * this.lr)
-return v
-},
-}
-}
-}
-
-export class MomentumOptimizer {
-constructor(lr, beta = 0.9) {
-this._learningrate = lr
-this._beta = beta
-}
-
-set learningRate(value) {
-this._learningrate = value
-}
-
-manager() {
-const this_ = this
-return {
-get lr() {
-return this_._learningrate
-},
-params: {},
-delta(key, value) {
-if (typeof value === 'number') {
-this.params[key] = (this.params[key] ?? value) * this_._beta + value * (1 - this_._beta)
-return this.params[key] * this.lr
-}
-if (!this.params[key]) {
-this.params[key] = value.copy()
-}
-this.params[key].broadcastOperate(value, (a, b) => a * this_._beta + b * (1 - this_._beta))
-const ret = this.params[key].copy()
-ret.map(v => v * this.lr)
-return ret
-},
-}
-}
-}
-
-export class RMSPropOptimizer {
-constructor(lr, beta = 0.999) {
-this._learningrate = lr
-this._beta = beta
-}
-
-set learningRate(value) {
-this._learningrate = value
-}
-
-manager() {
-const this_ = this
-return {
-get lr() {
-return this_._learningrate
-},
-params: {},
-delta(key, value) {
-if (typeof value === 'number') {
-this.params[key] = (this.params[key] ?? value ** 2) * this_._beta + (1 - this_._beta) * value ** 2
-return (this.lr * value) / Math.sqrt(this.params[key] + 1.0e-12)
-}
-if (!this.params[key]) {
-this.params[key] = value.copy()
-this.params[key].map(v => v ** 2)
-}
-this.params[key].broadcastOperate(value, (a, b) => a * this_._beta + (1 - this_._beta) * b * b)
-const ret = value.copy()
-ret.broadcastOperate(this.params[key], (a, b) => a * (this.lr / Math.sqrt(b + 1.0e-12)))
-return ret
-},
-}
-}
-}
-
-export class AdamOptimizer {
-constructor(lr = 0.001, beta1 = 0.9, beta2 = 0.999) {
-this._learningrate = lr
-this._beta1 = beta1
-this._beta2 = beta2
-}
-
-set learningRate(value) {
-this._learningrate = value
-}
-
-manager() {
-const this_ = this
-return {
-get lr() {
-return this_._learningrate
-},
-params: {},
-delta(key, value) {
-const valueIsNumber = typeof value === 'number'
-if (valueIsNumber) {
-value = new Matrix(1, 1, value)
-}
-if (!this.params[key]) {
-const s = value.copy()
-s.map(v => v ** 2)
-this.params[key] = {
-v: value.copy(),
-s: s,
-}
-}
-this.params[key].v.broadcastOperate(value, (a, b) => a * this_._beta1 + b * (1 - this_._beta1))
-this.params[key].s.broadcastOperate(value, (a, b) => a * this_._beta2 + (1 - this_._beta2) * b * b)
-const ret = this.params[key].v.copy()
-ret.broadcastOperate(this.params[key].s, (a, b) => a * (this.lr / Math.sqrt(b + 1.0e-12)))
-return valueIsNumber ? ret.toScaler() : ret
-},
-}
-}
-}
+export { SGDOptimizer } from './optimizer/sgd.js'
+export { MomentumOptimizer } from './optimizer/momentum.js'
+export { AdaGradOptimizer } from './optimizer/adagrad.js'
+export { RMSPropOptimizer } from './optimizer/rmsprop.js'
+export { AdaDeltaOptimizer } from './optimizer/adadelta.js'
+export { AdamOptimizer } from './optimizer/adam.js'
+export { RMSPropGravesOptimizer } from './optimizer/rmspropgraves.js'
+export { SMORMS3Optimizer } from './optimizer/smorms3.js'
+export { AdaMaxOptimizer } from './optimizer/adamax.js'
+export { NadamOptimizer } from './optimizer/nadam.js'
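All ten classes share the manager protocol visible in the deleted code above: a learningRate setter on the optimizer, and a manager() whose delta(key, value) accepts either a number or a Matrix and returns the scaled step, with per-key state for the stateful optimizers. A minimal sketch with the simplest one:

import { SGDOptimizer } from './lib/model/nns/optimizer.js'

const optimizer = new SGDOptimizer(0.1)
const manager = optimizer.manager()
manager.delta('w', 2) // 0.2: plain lr * gradient
optimizer.learningRate = 0.01 // the setter feeds through the manager's lr getter
manager.delta('w', 2) // 0.02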
40 changes: 40 additions & 0 deletions lib/model/nns/optimizer/adadelta.js
@@ -0,0 +1,40 @@
import Matrix from '../../../util/matrix.js'

export class AdaDeltaOptimizer {
constructor(lr, beta = 0.95) {
this._learningrate = lr
this._beta = beta
}

set learningRate(value) {
this._learningrate = value
}

manager() {
const this_ = this
return {
get lr() {
return this_._learningrate
},
params: {},
delta(key, value) {
const valueIsNumber = typeof value === 'number'
if (valueIsNumber) {
value = new Matrix(1, 1, value)
}
if (!this.params[key]) {
const z = value.copy()
z.fill(0)
this.params[key] = { v: z.copy(), u: z }
}
this.params[key].v.broadcastOperate(value, (a, b) => a * this_._beta + (1 - this_._beta) * b ** 2)
const ret = this.params[key].v.copy()
ret.broadcastOperate(this.params[key].u, (a, b) => Math.sqrt(b + 1.0e-12) / Math.sqrt(a + 1.0e-12))
ret.broadcastOperate(value, (a, b) => a * b)
this.params[key].u.broadcastOperate(ret, (a, b) => a * this_._beta + (1 - this_._beta) * b ** 2)
ret.map(v => this.lr * v)
return valueIsNumber ? ret.toScaler() : ret
},
}
}
}
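Written out for a scalar gradient g, one delta() call above computes the following (a paraphrase of the Matrix operations, with v, u and beta the class fields):

// v ← β·v + (1 − β)·g²                  accumulate squared gradients
// Δ = √(u + 1e-12) / √(v + 1e-12) · g   scale g by RMS of past updates over RMS of gradients
// u ← β·u + (1 − β)·Δ²                  accumulate squared updates
// return lr · Δ

Both accumulators start at zero, so the earliest steps are tiny, governed by the 1e-12 epsilon. Classic AdaDelta omits the final lr factor; this implementation keeps it.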
33 changes: 33 additions & 0 deletions lib/model/nns/optimizer/adagrad.js
@@ -0,0 +1,33 @@
export class AdaGradOptimizer {
constructor(lr) {
this._learningrate = lr
}

set learningRate(value) {
this._learningrate = value
}

manager() {
const this_ = this
return {
get lr() {
return this_._learningrate
},
params: {},
delta(key, value) {
if (typeof value === 'number') {
this.params[key] = (this.params[key] ?? 0) + value ** 2
return (this.lr * value) / Math.sqrt(this.params[key] + 1.0e-12)
}
if (!this.params[key]) {
this.params[key] = value.copy()
this.params[key].fill(0)
}
this.params[key].broadcastOperate(value, (a, b) => a + b * b)
const ret = value.copy()
ret.broadcastOperate(this.params[key], (a, b) => a * (this.lr / Math.sqrt(b + 1.0e-12)))
return ret
},
}
}
}
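The scalar branch makes the accumulation easy to check by hand, a quick sketch:

import { AdaGradOptimizer } from './lib/model/nns/optimizer.js'

const manager = new AdaGradOptimizer(0.1).manager()
// Squared gradients accumulate under each key, so repeated identical
// gradients produce shrinking steps:
manager.delta('w', 2) // params.w = 4;  0.1 * 2 / √4 = 0.1
manager.delta('w', 2) // params.w = 8;  0.1 * 2 / √8 ≈ 0.0707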
42 changes: 42 additions & 0 deletions lib/model/nns/optimizer/adam.js
@@ -0,0 +1,42 @@
import Matrix from '../../../util/matrix.js'

export class AdamOptimizer {
constructor(lr = 0.001, beta1 = 0.9, beta2 = 0.999) {
this._learningrate = lr
this._beta1 = beta1
this._beta2 = beta2
}

set learningRate(value) {
this._learningrate = value
}

manager() {
const this_ = this
return {
get lr() {
return this_._learningrate
},
params: {},
delta(key, value) {
const valueIsNumber = typeof value === 'number'
if (valueIsNumber) {
value = new Matrix(1, 1, value)
}
if (!this.params[key]) {
const z = value.copy()
z.fill(0)
this.params[key] = { v: z.copy(), s: z, c: 1 }
}
this.params[key].v.broadcastOperate(value, (a, b) => a * this_._beta1 + b * (1 - this_._beta1))
this.params[key].s.broadcastOperate(value, (a, b) => a * this_._beta2 + (1 - this_._beta2) * b * b)
const nv = 1 - this_._beta1 ** this.params[key].c
const ns = 1 - this_._beta2 ** this.params[key].c
const ret = this.params[key].v.copy()
ret.broadcastOperate(this.params[key].s, (a, b) => (a / nv) * (this.lr / Math.sqrt(b / ns + 1.0e-12)))
this.params[key].c++
return valueIsNumber ? ret.toScaler() : ret
},
}
}
}
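Worth noting: unlike the inline AdamOptimizer deleted from optimizer.js above, this extracted version keeps a step counter c and rescales the moment estimates by 1 − β₁ᶜ and 1 − β₂ᶜ, i.e. it adds the bias correction from the Adam paper. One consequence, checkable by hand in the scalar branch:

import { AdamOptimizer } from './lib/model/nns/optimizer.js'

const manager = new AdamOptimizer().manager() // default lr = 0.001
// On the first step v/nv = g and √(s/ns) = |g|, so the update is
// ≈ lr · sign(g) regardless of the gradient's magnitude:
manager.delta('w', 2) // ≈ 0.001
manager.delta('b', 200) // ≈ 0.001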