2 changes: 1 addition & 1 deletion README.md
@@ -132,7 +132,7 @@ for (let i = 0; i < n; i++) {
| feature selection | Mutual information, Ridge, Lasso, Elastic net, Decision tree, NCA |
| transformation | Box-Cox, Yeo-Johnson |
| density estimation | Histogram, Average shifted histogram, Polynomial histogram, Maximum likelihood, Kernel density estimation, k-nearest neighbor, Naive Bayes, GMM, HMM |
-| generate | MH, Slice sampling, GMM, GBRBM, HMM, VAE, GAN, NICE |
+| generate | MH, Slice sampling, GMM, GBRBM, HMM, VAE, GAN, NICE, Diffusion |
| smoothing | (Linear weighted / Triangular / Cumulative) Moving average, Exponential average, Moving median, KZ filter, Savitzky Golay filter, Hampel filter, Kalman filter, Particle filter, Lowpass filter, Bessel filter, Butterworth filter, Chebyshev filter, Elliptic filter |
| timeseries prediction | Holt winters, AR, ARMA, SDAR, VAR, Kalman filter, MLP, RNN |
| change point detection | Cumulative sum, k-nearest neighbor, LOF, COF, SST, KLIEP, LSIF, uLSIF, LSDD, HMM, Markov switching |
1 change: 1 addition & 0 deletions js/model_selector.js
@@ -484,6 +484,7 @@ const AIMethods = [
{ value: 'vae', title: 'VAE' },
{ value: 'gan', title: 'GAN' },
{ value: 'nice', title: 'NICE' },
{ value: 'diffusion_model', title: 'Diffusion Model' },
],
},
{
72 changes: 72 additions & 0 deletions js/view/diffusion_model.js
@@ -0,0 +1,72 @@
import Controller from '../controller.js'
import { BaseWorker } from '../utils.js'

class DiffusionModelWorker extends BaseWorker {
constructor() {
super('js/view/worker/model_worker.js', { type: 'module' })
}

initialize(timesteps) {
return this._postMessage({ name: 'diffusion_model', method: 'constructor', arguments: [timesteps] })
}

epoch() {
return this._postMessage({ name: 'diffusion_model', method: 'epoch' }).then(r => r.data)
}

fit(train_x, iteration, rate, batch) {
return this._postMessage({
name: 'diffusion_model',
method: 'fit',
arguments: [train_x, iteration, rate, batch],
}).then(r => r.data)
}

generate(n) {
return this._postMessage({ name: 'diffusion_model', method: 'generate', arguments: [n] }).then(r => r.data)
}
}

export default function (platform) {
platform.setting.ml.usage =
'Click and add data point. Next, click "Initialize". Finally, click "Fit" button repeatedly.'
const controller = new Controller(platform)
const model = new DiffusionModelWorker()
let epoch = 0

const fitModel = async () => {
if (platform.datas.length === 0) {
return
}
const tx = platform.trainInput
const loss = await model.fit(tx, +iteration.value, rate.value, batch.value)
epoch = await model.epoch()
platform.plotLoss(loss[0])
const gen_data = await model.generate(tx.length)
platform.trainResult = gen_data
}

const genValues = async () => {
genBtn.element.disabled = true
const gen_data = await model.generate(platform.trainInput.length)
genBtn.element.disabled = false
platform.trainResult = gen_data
}

const slbConf = controller.stepLoopButtons().init(done => {
model.initialize(100).then(done)
platform.init()
})
const iteration = controller.select({ label: ' Iteration ', values: [1, 10, 100, 1000, 10000] })
iteration.value = 10
const rate = controller.input.number({ label: 'Learning rate ', min: 0, max: 100, step: 0.01, value: 0.01 })
const batch = controller.input.number({ label: ' Batch size ', min: 1, max: 100, value: 10 })
slbConf.step(fitModel).epoch(() => epoch)
const genBtn = controller.input.button('Generate').on('click', genValues)

return () => {
model.terminate()
}
}
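For orientation, the worker methods above are thin wrappers around the `DiffusionModel` class added in `lib/model/diffusion_model.js` below. A minimal sketch of the same calls without the worker (the import path and toy data are illustrative, not part of this PR):

```js
import DiffusionModel from './lib/model/diffusion_model.js'

// Construct with the number of diffusion timesteps, as initialize(100) does above.
const model = new DiffusionModel(100)

// Toy 2D training data standing in for the points clicked on the canvas.
const data = Array.from({ length: 200 }, () => [Math.random() * 2 - 1, Math.random() * 2 - 1])

// Each "Step" in the UI corresponds to one fit(train_x, iteration, rate, batch) call.
for (let i = 0; i < 20; i++) {
	const loss = model.fit(data, 10, 0.01, 10)
	console.log(model.epoch, loss)
}

// Sample as many new points as there are training points, like the fit/generate loop above.
const generated = model.generate(data.length)
console.log(generated.length) // 200
```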
176 changes: 176 additions & 0 deletions lib/model/diffusion_model.js
@@ -0,0 +1,176 @@
import Matrix from '../util/matrix.js'
import Tensor from '../util/tensor.js'
import NeuralNetwork from './neuralnetwork.js'

/**
* Diffusion model network
*/
export default class DiffusionModel {
// https://qiita.com/pocokhc/items/5a015ee5b527a357dd67
/**
* @param {number} timesteps Number of timesteps
* @param {LayerObject[]} [layers] Layers
*/
constructor(timesteps, layers) {
this._timesteps = timesteps
this._ulayers = layers
this._peDims = 32

this._model = null
this._epoch = 0

const betaStart = 0.0001
const betaEnd = 0.02
const betaStep = (betaEnd - betaStart) / (this._timesteps - 1)
this._beta = [betaStart]
for (let t = 1; t < this._timesteps - 1; t++) {
this._beta[t] = betaStart + betaStep * t
}
this._beta.push(betaEnd)
this._alpha = [1 - this._beta[0]]
this._alphaCumprod = [this._alpha[0]]
for (let t = 1; t < this._beta.length; t++) {
this._alpha[t] = 1 - this._beta[t]
this._alphaCumprod[t] = this._alphaCumprod[t - 1] * this._alpha[t]
}
}

/**
* Epoch
* @type {number}
*/
get epoch() {
return this._epoch
}

_addNoise(x, t) {
const at = this._alphaCumprod[t]
const sqrtat = Math.sqrt(at)
const sqrt1at = Math.sqrt(1 - at)
const noise = Tensor.randn(x.sizes)
const xNoised = x.copy()
xNoised.broadcastOperate(noise, (a, b) => sqrtat * a + sqrt1at * b)
return [xNoised, noise]
}

_build() {
if (this._dataShape.length === 1) {
this._layers = [
{ type: 'input', name: 'x' },
{ type: 'input', name: 'position_encoding' },
{ type: 'full', out_size: this._peDims, l2_decay: 0.001, activation: 'gelu', name: 'pe' },
{ type: 'concat', input: ['x', 'pe'], axis: 1 },
]
if (this._ulayers) {
this._layers.push(...this._ulayers)
} else {
this._layers.push(
{ type: 'full', out_size: 32, l2_decay: 0.001, name: 'c1', activation: 'tanh' },
{ type: 'full', out_size: 16, l2_decay: 0.001, activation: 'tanh' },
{ type: 'full', out_size: 32, l2_decay: 0.001, name: 'u1', activation: 'tanh' },
{ type: 'concat', input: ['u1', 'c1'], axis: 1 },
{ type: 'full', out_size: 32, l2_decay: 0.001, activation: 'tanh' }
)
}
this._layers.push({ type: 'full', out_size: this._dataShape[0], l2_decay: 0.001 }, { type: 'output' })
} else {
const dim = this._dataShape.length
this._layers = [
{ type: 'input', name: 'x' },
{ type: 'input', name: 'position_encoding' },
{ type: 'full', out_size: this._peDims, l2_decay: 0.001, activation: 'gelu' },
{ type: 'reshape', size: [...Array(dim - 1).fill(1), this._peDims] },
{ type: 'up_sampling', size: this._dataShape.slice(0, dim - 1), name: 'pe' },
{ type: 'concat', input: ['x', 'pe'], axis: dim },
]
if (this._ulayers) {
this._layers.push(...this._ulayers)
} else {
this._layers.push(
{
type: 'conv',
kernel: 3,
channel: 16,
padding: 1,
l2_decay: 0.001,
name: 'c1',
activation: 'relu',
},
{ type: 'max_pool', kernel: 2 },
{ type: 'conv', kernel: 3, channel: 32, padding: 1, l2_decay: 0.001, activation: 'relu' },
{ type: 'up_sampling', size: 2, name: 'u1' },
{ type: 'concat', input: ['u1', 'c1'], axis: dim },
{ type: 'conv', kernel: 3, channel: 16, padding: 1, l2_decay: 0.001, activation: 'relu' }
)
}
this._layers.push(
{ type: 'conv', kernel: 1, channel: this._dataShape[dim - 1], l2_decay: 0.001 },
{ type: 'output' }
)
}

return NeuralNetwork.fromObject(this._layers, 'mse', 'adam')
}

_positionEncoding(t, embdims) {
const rates = Array.from({ length: embdims }, (_, i) => t / 10000 ** ((2 * Math.floor(i / 2)) / embdims))
const pe = rates.map((v, i) => (i % 2 === 0 ? Math.sin(v) : Math.cos(v)))
return new Matrix(1, embdims, pe)
}

/**
* Fit model.
* @param {Array<Array<number>>} train_x Training data
* @param {number} iteration Iteration count
* @param {number} rate Learning rate
* @param {number} batch Batch size
* @returns {number[]} Training loss values
*/
fit(train_x, iteration, rate, batch) {
const x = Tensor.fromArray(train_x)
this._dataShape = x.sizes.slice(1)
if (!this._model) {
this._model = this._build()
}
let loss = null
for (let i = 0; i < iteration; i++) {
const t = Math.floor(Math.random() * this._timesteps)
const pe = this._positionEncoding(t, this._peDims)
pe.repeat(x.sizes[0], 0)
const [noised_x, noise] = this._addNoise(x, t)

loss = this._model.fit({ x: noised_x, position_encoding: pe }, Tensor.fromArray(noise), 1, rate, batch)
}
this._epoch += iteration
return loss
}

/**
* Returns generated data from the model.
* @param {number} n Number of generated data
* @returns {Array<Array<number>>} Generated values
*/
generate(n) {
const ds = this._dataShape.concat()
const samples = Tensor.randn([n, ...ds])
for (let t = this._timesteps - 1; t >= 0; t--) {
const pe = this._positionEncoding(t, this._peDims)
pe.repeat(n, 0)

const pred = this._model.calc({ x: samples, position_encoding: pe })

samples.broadcastOperate(
pred,
(a, b) =>
(1 / Math.sqrt(this._alpha[t])) * (a - (b * this._beta[t]) / Math.sqrt(1 - this._alphaCumprod[t]))
)
if (t > 0) {
const s2 = ((1 - this._alphaCumprod[t - 1]) / (1 - this._alphaCumprod[t])) * this._beta[t]
const noise = Tensor.randn(samples.sizes, 0, s2)
samples.broadcastOperate(noise, (a, b) => a + b)
}
}

return samples.toArray()
}
}
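For reference, the noise schedule and the two loops above appear to follow the standard DDPM formulation (Ho et al., 2020) with a linear β schedule. Writing ᾱ_t for `_alphaCumprod[t]`, `_addNoise` implements the forward (noising) step and `generate` the reverse (sampling) step, with the network acting as the noise predictor ε_θ conditioned on t through the sinusoidal position encoding:

$$x_t = \sqrt{\bar\alpha_t}\,x_0 + \sqrt{1-\bar\alpha_t}\,\varepsilon, \qquad \varepsilon \sim \mathcal{N}(0, I)$$

$$x_{t-1} = \frac{1}{\sqrt{\alpha_t}}\left(x_t - \frac{\beta_t}{\sqrt{1-\bar\alpha_t}}\,\varepsilon_\theta(x_t, t)\right) + \sigma_t z, \qquad \sigma_t^2 = \frac{1-\bar\alpha_{t-1}}{1-\bar\alpha_t}\,\beta_t, \quad z \sim \mathcal{N}(0, I)$$

The `mse` loss between the sampled noise ε and the network output in `fit` corresponds to the simplified DDPM training objective, with a single uniformly sampled t per call.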
9 changes: 7 additions & 2 deletions lib/model/nns/layer/full.js
@@ -93,15 +93,20 @@ export default class FullyConnected extends Layer {
i = this._i.copy()
i.reshape(-1, this._w.rows)
i = i.toMatrix()
} else if (!(this._i instanceof Matrix)) {
i = i.toMatrix()
}
let b = bo
if (b.dimension !== 2) {
b = bo.copy()
b.reshape(-1, this._w.cols)
b = b.toMatrix()
} else if (!(b instanceof Matrix)) {
b = b.toMatrix()
}
const n = this._i.sizes[0]
this._dw = i.tDot(b)
-this._dw.div(this._i.rows)
+this._dw.div(n)
if (this._l2_decay > 0 || this._l1_decay > 0) {
for (let i = 0; i < this._dw.rows; i++) {
for (let j = 0; j < this._dw.cols; j++) {
@@ -111,7 +116,7 @@
}
}
this._db = b.sum(0)
-this._db.div(this._i.rows)
+this._db.div(n)

this._bi = bo.dot(this._w.t)
if (this._wname || this._bname) {
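The substantive change in this hunk is that the weight and bias gradients are now averaged by `this._i.sizes[0]` rather than `this._i.rows`, and non-Matrix 2-D inputs and gradients are converted with `toMatrix()`. For a Matrix input, `sizes[0]` equals `rows`, so nothing changes there; for a Tensor input, `rows` is undefined, which is what the new branches and `n` address. A rough sketch of the distinction, assuming the `sizes` accessors behave as they are used elsewhere in this PR:

```js
import Matrix from './lib/util/matrix.js'
import Tensor from './lib/util/tensor.js'

const m = new Matrix(4, 3)            // plain matrix batch: m.rows === 4 and m.sizes[0] === 4
const t = Tensor.randn([4, 8, 8, 3])  // image-like batch: t.rows is undefined, t.sizes[0] === 4
// Dividing a gradient by t.rows would propagate NaN; dividing by sizes[0]
// keeps the average over the leading (batch) dimension in both cases.
console.log(m.sizes[0], t.sizes[0])   // 4 4
```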
15 changes: 15 additions & 0 deletions lib/util/matrix.js
@@ -461,6 +461,21 @@
return s + ']'
}

_to_position(...i) {
let p = 0
for (let d = 0; d < this.dimension; d++) {
if (i[d] < 0 || this._size[d] <= i[d]) {
throw new MatrixException('Index out of bounds.')
}
p = p * this._size[d] + i[d]
}
return p
}

_to_index(p) {
return [Math.floor(p / this._size[1]), p % this._size[1]]
}

/**
* Returns a copy of this matrix.
* @param {Matrix<T>} [dst] Destination matrix
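A quick worked example of the row-major mapping implemented by the new helpers (illustrative only; `_to_position` and `_to_index` are private to `Matrix`):

```js
import Matrix from './lib/util/matrix.js'

const m = new Matrix(2, 3) // 2 rows, 3 columns; the stored values do not matter for indexing
// _to_position folds the indices row-major: p = (0 * 2 + 1) * 3 + 2 = 5
console.log(m._to_position(1, 2)) // 5
// _to_index is the 2-D inverse of that mapping
console.log(m._to_index(5)) // [ 1, 2 ]
// Out-of-range indices such as _to_position(2, 0) throw a MatrixException
```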
52 changes: 52 additions & 0 deletions tests/gui/view/diffusion_model.test.js
@@ -0,0 +1,52 @@
import { getPage } from '../helper/browser'

describe('generate', () => {
/** @type {Awaited<ReturnType<getPage>>} */
let page
beforeEach(async () => {
page = await getPage()
const clusters = page.locator('#data_menu input[name=n]')
await clusters.fill('1')
const resetDataButton = page.locator('#data_menu input[value=Reset]')
await resetDataButton.dispatchEvent('click')
const taskSelectBox = page.locator('#ml_selector dl:first-child dd:nth-child(5) select')
await taskSelectBox.selectOption('GR')
const modelSelectBox = page.locator('#ml_selector .model_selection #mlDisp')
await modelSelectBox.selectOption('diffusion_model')
})

afterEach(async () => {
await page?.close()
})

test('initialize', async () => {
const methodMenu = page.locator('#ml_selector #method_menu')
const buttons = methodMenu.locator('.buttons')

const iteration = buttons.locator('select:nth-of-type(1)')
await expect(iteration.inputValue()).resolves.toBe('10')
const rate = buttons.locator('input:nth-of-type(2)')
await expect(rate.inputValue()).resolves.toBe('0.01')
const batch = buttons.locator('input:nth-of-type(3)')
await expect(batch.inputValue()).resolves.toBe('10')
})

test('learn', async () => {
const methodMenu = page.locator('#ml_selector #method_menu')
const buttons = methodMenu.locator('.buttons')

const epoch = buttons.locator('[name=epoch]')
await expect(epoch.textContent()).resolves.toBe('0')
const methodFooter = page.locator('#method_footer', { state: 'attached' })
await expect(methodFooter.textContent()).resolves.toBe('')

const initButton = buttons.locator('input[value=Initialize]')
await initButton.dispatchEvent('click')
const stepButton = buttons.locator('input[value=Step]:enabled')
await stepButton.dispatchEvent('click')
await buttons.locator('input[value=Step]:enabled').waitFor()

await expect(epoch.textContent()).resolves.toBe('10')
await expect(methodFooter.textContent()).resolves.toMatch(/^loss/)
})
})