From c1aba51a4229f50f39a4149f902f0274dc93167f Mon Sep 17 00:00:00 2001 From: ishii-norimi Date: Tue, 19 Mar 2024 20:47:24 +0900 Subject: [PATCH 1/2] Remove kernel argument and accept object for kernel argument --- js/view/gplvm.js | 83 ++++++----------------------------- js/view/ocsvm.js | 6 +-- js/view/pca.js | 8 +--- js/view/s3vm.js | 6 +-- js/view/svc.js | 6 +-- js/view/svm.js | 6 +-- js/view/svr.js | 6 +-- lib/model/gplvm.js | 10 ++--- lib/model/ocsvm.js | 14 ++++-- lib/model/pca.js | 14 ++++-- lib/model/s3vm.js | 14 ++++-- lib/model/svc.js | 14 ++++-- lib/model/svm.js | 14 ++++-- lib/model/svr.js | 14 ++++-- tests/gui/view/gplvm.test.js | 57 ++++++++++++++++++++++++ tests/lib/model/gplvm.test.js | 41 ++++++++++++----- tests/lib/model/ocsvm.test.js | 8 ++-- tests/lib/model/pca.test.js | 66 ++++++++++++++-------------- tests/lib/model/s3vm.test.js | 2 +- tests/lib/model/svc.test.js | 8 ++-- tests/lib/model/svm.test.js | 37 +++++++--------- tests/lib/model/svr.test.js | 7 +-- 22 files changed, 231 insertions(+), 210 deletions(-) create mode 100644 tests/gui/view/gplvm.test.js diff --git a/js/view/gplvm.js b/js/view/gplvm.js index e0cd47fbf..c8e524ce1 100644 --- a/js/view/gplvm.js +++ b/js/view/gplvm.js @@ -1,18 +1,18 @@ import GPLVM from '../../lib/model/gplvm.js' import Controller from '../controller.js' -var dispGPLVM = function (elm, platform) { +export default function (platform) { + platform.setting.ml.usage = 'Click and add data point. Next, click "Fit" button.' 
const controller = new Controller(platform) let model = null const fitModel = () => { if (!model) { const dim = platform.dimension - const alpha = +elm.select('[name=alpha]').property('value') - const sigma = +elm.select('[name=sigma]').property('value') - const ez = +elm.select('[name=ez]').property('value') - const ea = +elm.select('[name=ea]').property('value') - const ep = +elm.select('[name=ep]').property('value') - model = new GPLVM(dim, alpha, ez, ea, ep, 'gaussian', [1.0, sigma]) + model = new GPLVM(dim, alpha.value, ez.value, ea.value, ep.value, { + name: kernel.value, + a: 1.0, + b: gauss_sigma.value, + }) model.init(platform.trainInput) } model.fit() @@ -20,63 +20,13 @@ var dispGPLVM = function (elm, platform) { platform.trainResult = y } - const kernelElm = elm.append('span') - kernelElm - .append('select') - .attr('name', 'kernel') - .selectAll('option') - .data(['gaussian']) - .enter() - .append('option') - .attr('value', d => d) - .text(d => d) - const gauss_sigma = kernelElm.append('span') - gauss_sigma - .append('span') - .text(' sigma = ') - .append('input') - .attr('type', 'number') - .attr('name', 'sigma') - .attr('value', 1) - .attr('min', 0) - .attr('max', 10) - .attr('step', 0.1) - elm.append('span') - .text(' alpha = ') - .append('input') - .attr('type', 'number') - .attr('name', 'alpha') - .attr('value', 0.05) - .attr('min', 0) - .attr('max', 10) - .attr('step', 0.01) - elm.append('span') - .text(' ez = ') - .append('input') - .attr('type', 'number') - .attr('name', 'ez') - .attr('value', 1) - .attr('min', 0) - .attr('max', 10) - .attr('step', 0.1) - elm.append('span') - .text(' ea = ') - .append('input') - .attr('type', 'number') - .attr('name', 'ea') - .attr('value', 0.005) - .attr('min', 0) - .attr('max', 10) - .attr('step', 0.001) - elm.append('span') - .text(' ep = ') - .append('input') - .attr('type', 'number') - .attr('name', 'ep') - .attr('value', 0.02) - .attr('min', 0) - .attr('max', 10) - .attr('step', 0.001) + const kernelElm = 
controller.span() + const kernel = kernelElm.select(['gaussian']) + const gauss_sigma = kernelElm.input.number({ label: ' sigma = ', min: 0, max: 10, step: 0.1, value: 1 }) + const alpha = controller.input.number({ label: ' alpha = ', min: 0, max: 10, step: 0.01, value: 0.05 }) + const ez = controller.input.number({ label: ' ez = ', min: 0, max: 10, step: 0.1, value: 1 }) + const ea = controller.input.number({ label: ' ea = ', min: 0, max: 10, step: 0.001, value: 0.005 }) + const ep = controller.input.number({ label: ' ep = ', min: 0, max: 10, step: 0.001, value: 0.02 }) controller .stepLoopButtons() .init(() => { @@ -86,8 +36,3 @@ var dispGPLVM = function (elm, platform) { .step(fitModel) .epoch() } - -export default function (platform) { - platform.setting.ml.usage = 'Click and add data point. Next, click "Fit" button.' - dispGPLVM(platform.setting.ml.configElement, platform) -} diff --git a/js/view/ocsvm.js b/js/view/ocsvm.js index 11940dd39..c21a66976 100644 --- a/js/view/ocsvm.js +++ b/js/view/ocsvm.js @@ -50,11 +50,7 @@ export default function (platform) { step: 0.01, }) const slbConf = controller.stepLoopButtons().init(() => { - const args = [] - if (kernel.value === 'gaussian') { - args.push(gamma.value) - } - model = new OCSVM(nu.value, kernel.value, args) + model = new OCSVM(nu.value, { name: kernel.value, d: gamma.value }) model.init(platform.trainInput, platform.trainOutput) learn_epoch = 0 platform.init() diff --git a/js/view/pca.js b/js/view/pca.js index 851332447..37d0d7793 100644 --- a/js/view/pca.js +++ b/js/view/pca.js @@ -13,13 +13,7 @@ export default function (platform) { } else if (type.value === 'dual') { model = new DualPCA() } else { - const args = [] - if (kernel.value === 'polynomial') { - args.push(poly_d.value) - } else if (kernel.value === 'gaussian') { - args.push(sigma.value) - } - model = new KernelPCA(kernel.value, args) + model = new KernelPCA({ name: kernel.value, sigma: sigma.value, n: poly_d.value }) } 
model.fit(platform.trainInput) const y = model.predict(platform.trainInput, dim) diff --git a/js/view/s3vm.js b/js/view/s3vm.js index b71db9875..32b75782f 100644 --- a/js/view/s3vm.js +++ b/js/view/s3vm.js @@ -34,11 +34,7 @@ export default function (platform) { step: 0.01, }) const slbConf = controller.stepLoopButtons().init(() => { - const kernel_args = [] - if (kernel.value === 'gaussian') { - kernel_args.push(gamma.value) - } - model = new S3VM(kernel.value, kernel_args) + model = new S3VM({ name: kernel.value, d: gamma.value }) model.init( platform.trainInput, platform.trainOutput.map(v => (v[0] == null ? null : v[0] === 1 ? -1 : 1)) diff --git a/js/view/svc.js b/js/view/svc.js index 2e5b77c76..b889e5857 100644 --- a/js/view/svc.js +++ b/js/view/svc.js @@ -35,11 +35,7 @@ export default function (platform) { controller .stepLoopButtons() .init(() => { - const kernel_args = [] - if (kernel.value === 'gaussian') { - kernel_args.push(gamma.value) - } - model = new SVC(kernel.value, kernel_args) + model = new SVC({ name: kernel.value, d: gamma.value }) model.init(platform.trainInput) platform.init() }) diff --git a/js/view/svm.js b/js/view/svm.js index 879d7cf3f..57cea4b09 100644 --- a/js/view/svm.js +++ b/js/view/svm.js @@ -41,12 +41,8 @@ export default function (platform) { step: 0.01, }) const slbConf = controller.stepLoopButtons().init(() => { - const kernel_args = [] - if (kernel.value === 'gaussian') { - kernel_args.push(gamma.value) - } model = new EnsembleBinaryModel(function () { - return new SVM(kernel.value, kernel_args) + return new SVM({ name: kernel.value, d: gamma.value }) }, method.value) model.init( platform.trainInput, diff --git a/js/view/svr.js b/js/view/svr.js index 29f625498..851a5fd03 100644 --- a/js/view/svr.js +++ b/js/view/svr.js @@ -31,11 +31,7 @@ export default function (platform) { step: 0.1, }) const slbConf = controller.stepLoopButtons().init(() => { - const args = [] - if (kernel.value === 'gaussian') { - args.push(gamma.value) - } - 
model = new SVR(kernel.value, args) + model = new SVR({ name: kernel.value, d: gamma.value }) model.init(platform.trainInput, platform.trainOutput) learn_epoch = 0 platform.init() diff --git a/lib/model/gplvm.js b/lib/model/gplvm.js index 5882c3dbc..962b05a5f 100644 --- a/lib/model/gplvm.js +++ b/lib/model/gplvm.js @@ -84,15 +84,15 @@ export default class GPLVM { * @param {number} [ez=1.0] Learning rate for z * @param {number} [ea=0.005] Learning rate for alpha * @param {number} [ep=0.2] Learning rate for kernel - * @param {'gaussian'} [kernel=gaussian] Kernel name - * @param {*[]} [kernelArgs] Arguments for kernel + * @param {'gaussian' | { name: 'gaussian', a?: number, b?: number}} [kernel=gaussian] Kernel name */ - constructor(rd, alpha, ez = 1, ea = 0.005, ep = 0.2, kernel = 'gaussian', kernelArgs = []) { + constructor(rd, alpha, ez = 1, ea = 0.005, ep = 0.2, kernel = 'gaussian') { this._rd = rd this._alpha = alpha - if (kernel === 'gaussian') { - this._kernel = new GaussianKernel(...kernelArgs, ep) + if (typeof kernel === 'string') { + kernel = { name: kernel } } + this._kernel = new GaussianKernel(kernel.a, kernel.b, ep) this._ez = ez this._ea = ea } diff --git a/lib/model/ocsvm.js b/lib/model/ocsvm.js index 042061010..648b09acd 100644 --- a/lib/model/ocsvm.js +++ b/lib/model/ocsvm.js @@ -17,14 +17,20 @@ export default class OCSVM { // http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.39.9421&rep=rep1&type=pdf /** * @param {number} nu Nu - * @param {'gaussian' | 'linear' | function (number[], number[]): number} kernel Kernel name - * @param {*[]} [kernelArgs] Arguments for kernel + * @param {'gaussian' | 'linear' | { name: 'gaussian', d?: number } | { name: 'linear' } | function (number[], number[]): number} kernel Kernel name */ - constructor(nu, kernel, kernelArgs = []) { + constructor(nu, kernel) { if (typeof kernel === 'function') { this._kernel = kernel } else { - this._kernel = Kernel[kernel](...kernelArgs) + if (typeof kernel === 'string') { + 
kernel = { name: kernel } + } + if (kernel.name === 'gaussian') { + this._kernel = Kernel.gaussian(kernel.d) + } else { + this._kernel = Kernel.linear() + } } this._nu = nu diff --git a/lib/model/pca.js b/lib/model/pca.js index 025e59c47..4dc97623d 100644 --- a/lib/model/pca.js +++ b/lib/model/pca.js @@ -95,14 +95,20 @@ export class DualPCA { export class KernelPCA { // https://axa.biopapyrus.jp/machine-learning/preprocessing/kernel-pca.html /** - * @param {'gaussian' | 'polynomial' | function (number[], number[]): number} kernel Kernel name - * @param {*[]} [kernelArgs] Arguments for kernel + * @param {'gaussian' | 'polynomial' | { name: 'gaussian', sigma?: number } | { name: 'polynomial', n?: number } | function (number[], number[]): number} kernel Kernel name */ - constructor(kernel, kernelArgs = []) { + constructor(kernel) { if (typeof kernel === 'function') { this._kernel = (a, b) => kernel(a.value, b.value) } else { - this._kernel = Kernel[kernel](...kernelArgs) + if (typeof kernel === 'string') { + kernel = { name: kernel } + } + if (kernel.name === 'gaussian') { + this._kernel = Kernel.gaussian(kernel.sigma) + } else { + this._kernel = Kernel.polynomial(kernel.n) + } } } diff --git a/lib/model/s3vm.js b/lib/model/s3vm.js index 68e4846a9..5fcdc70bd 100644 --- a/lib/model/s3vm.js +++ b/lib/model/s3vm.js @@ -18,10 +18,9 @@ export default class S3VM { // https://is.mpg.de/fileadmin/user_upload/files/publications/SSL-spam_4162[0].pdf // http://www.fabiangieseke.de/pdfs/neucom2013_draft.pdf /** - * @param {'gaussian' | 'linear' | function (number[], number[]): number} kernel Kernel name - * @param {*[]} [kernelArgs] Arguments for kernel + * @param {'gaussian' | 'linear' | { name: 'gaussian', d?: number } | { name: 'linear' } | function (number[], number[]): number} kernel Kernel name */ - constructor(kernel, kernelArgs = []) { + constructor(kernel) { this._b = 0 this._s = 3 this._gammas = null @@ -32,7 +31,14 @@ export default class S3VM { if (typeof kernel === 
'function') { this._kernel = kernel } else { - this._kernel = Kernel[kernel](...kernelArgs) + if (typeof kernel === 'string') { + kernel = { name: kernel } + } + if (kernel.name === 'gaussian') { + this._kernel = Kernel.gaussian(kernel.d) + } else { + this._kernel = Kernel.linear() + } } } diff --git a/lib/model/svc.js b/lib/model/svc.js index d0d966017..edf4bcd6a 100644 --- a/lib/model/svc.js +++ b/lib/model/svc.js @@ -15,14 +15,20 @@ export default class SVC { // https://dl.acm.org/doi/pdf/10.5555/944790.944807 // https://github.com/josiahw/SimpleSVClustering /** - * @param {'gaussian' | 'linear' | function (number[], number[]): number} kernel Kernel name - * @param {*[]} [kernelArgs] Arguments for kernel + * @param {'gaussian' | 'linear' | { name: 'gaussian', d?: number } | { name: 'linear' } | function (number[], number[]): number} kernel Kernel name */ - constructor(kernel, kernelArgs = []) { + constructor(kernel) { if (typeof kernel === 'function') { this._kernel = kernel } else { - this._kernel = Kernel[kernel](...kernelArgs) + if (typeof kernel === 'string') { + kernel = { name: kernel } + } + if (kernel.name === 'gaussian') { + this._kernel = Kernel.gaussian(kernel.d) + } else { + this._kernel = Kernel.linear() + } } this._C = 1 diff --git a/lib/model/svm.js b/lib/model/svm.js index fd77dc021..69d7b4651 100644 --- a/lib/model/svm.js +++ b/lib/model/svm.js @@ -13,10 +13,9 @@ const Kernel = { */ export default class SVM { /** - * @param {'gaussian' | 'linear' | function (number[], number[]): number} kernel Kernel name - * @param {*[]} [kernelArgs] Arguments for kernel + * @param {'gaussian' | 'linear' | { name: 'gaussian', d?: number } | { name: 'linear' } | function (number[], number[]): number} kernel Kernel name */ - constructor(kernel, kernelArgs = []) { + constructor(kernel) { this._n = 0 this._a = [] this._x = [] @@ -31,7 +30,14 @@ export default class SVM { if (typeof kernel === 'function') { this._kernel = kernel } else { - this._kernel = 
Kernel[kernel](...kernelArgs) + if (typeof kernel === 'string') { + kernel = { name: kernel } + } + if (kernel.name === 'gaussian') { + this._kernel = Kernel.gaussian(kernel.d) + } else { + this._kernel = Kernel.linear() + } } } diff --git a/lib/model/svr.js b/lib/model/svr.js index 581657dc0..7fc1c0b20 100644 --- a/lib/model/svr.js +++ b/lib/model/svr.js @@ -14,14 +14,20 @@ const Kernel = { export default class SVR { // https://jp.mathworks.com/matlabcentral/fileexchange/79790-sequential-minimal-optimization-smo-for-svr /** - * @param {'gaussian' | 'linear' | function (number[], number[]): number} kernel Kernel name - * @param {*[]} [kernelArgs] Arguments for kernel + * @param {'gaussian' | 'linear' | { name: 'gaussian', d?: number } | { name: 'linear' } | function (number[], number[]): number} kernel Kernel name */ - constructor(kernel, kernelArgs = []) { + constructor(kernel) { if (typeof kernel === 'function') { this._kernel = kernel } else { - this._kernel = Kernel[kernel](...kernelArgs) + if (typeof kernel === 'string') { + kernel = { name: kernel } + } + if (kernel.name === 'gaussian') { + this._kernel = Kernel.gaussian(kernel.d) + } else { + this._kernel = Kernel.linear() + } } this._C = 1 diff --git a/tests/gui/view/gplvm.test.js b/tests/gui/view/gplvm.test.js new file mode 100644 index 000000000..e1dd07900 --- /dev/null +++ b/tests/gui/view/gplvm.test.js @@ -0,0 +1,57 @@ +import { getPage } from '../helper/browser' + +describe('dimensionality reduction', () => { + /** @type {Awaited<ReturnType<typeof getPage>>} */ + let page + beforeEach(async () => { + page = await getPage() + }) + + afterEach(async () => { + await page?.close() + }) + + test('initialize', async () => { + const taskSelectBox = await page.waitForSelector('#ml_selector dl:first-child dd:nth-child(5) select') + await taskSelectBox.selectOption('DR') + const modelSelectBox = await page.waitForSelector('#ml_selector .model_selection #mlDisp') + await modelSelectBox.selectOption('gplvm') + const methodMenu = await 
page.waitForSelector('#ml_selector #method_menu') + const buttons = await methodMenu.waitForSelector('.buttons') + + const kernel = await buttons.waitForSelector('select:nth-of-type(1)') + await expect((await kernel.getProperty('value')).jsonValue()).resolves.toBe('gaussian') + + await buttons.waitForSelector('input') + const inputs = await buttons.$$('input') + const sigma = inputs[0] + await expect((await sigma.getProperty('value')).jsonValue()).resolves.toBe('1') + const alpha = inputs[1] + await expect((await alpha.getProperty('value')).jsonValue()).resolves.toBe('0.05') + const ez = inputs[2] + await expect((await ez.getProperty('value')).jsonValue()).resolves.toBe('1') + const ea = inputs[3] + await expect((await ea.getProperty('value')).jsonValue()).resolves.toBe('0.005') + const ep = inputs[4] + await expect((await ep.getProperty('value')).jsonValue()).resolves.toBe('0.02') + }) + + test('learn', async () => { + const taskSelectBox = await page.waitForSelector('#ml_selector dl:first-child dd:nth-child(5) select') + await taskSelectBox.selectOption('DR') + const modelSelectBox = await page.waitForSelector('#ml_selector .model_selection #mlDisp') + await modelSelectBox.selectOption('gplvm') + const methodMenu = await page.waitForSelector('#ml_selector #method_menu') + const buttons = await methodMenu.waitForSelector('.buttons') + + const initButton = await buttons.waitForSelector('input[value=Initialize]') + await initButton.evaluate(el => el.click()) + const stepButton = await buttons.waitForSelector('input[value=Step]:enabled') + await stepButton.evaluate(el => el.click()) + + const svg = await page.waitForSelector('#plot-area svg') + await svg.waitForSelector('.tile circle') + const circles = await svg.$$('.tile circle') + expect(circles).toHaveLength(300) + }, 60000) +}) diff --git a/tests/lib/model/gplvm.test.js b/tests/lib/model/gplvm.test.js index e3565443c..0d18757ea 100644 --- a/tests/lib/model/gplvm.test.js +++ b/tests/lib/model/gplvm.test.js @@ 
-3,19 +3,36 @@ import GPLVM from '../../../lib/model/gplvm.js' import { coRankingMatrix } from '../../../lib/evaluate/dimensionality_reduction.js' -test('dimension reduction', () => { - const model = new GPLVM(3, 1) - const x = Matrix.randn(50, 10, 0, Matrix.diag([1.0, 0.1, 1.0, 0.1, 0.1, 0.1, 0.1, 0.1, 1.0, 0.1])).toArray() +describe('dimension reduction', () => { + test('default', () => { + const model = new GPLVM(3, 1) + const x = Matrix.randn(50, 10, 0, Matrix.diag([1.0, 0.5, 1.0, 0.5, 0.5, 0.5, 0.5, 0.5, 1.0, 0.5])).toArray() - model.init(x) - const llh = model.llh() - for (let i = 0; i < 100; i++) { - model.fit() - } - expect(model.llh()).toBeLessThan(llh) - const y = model.predict() - const q = coRankingMatrix(x, y, 30, 20) - expect(q).toBeGreaterThan(0.9) + model.init(x) + const llh = model.llh() + for (let i = 0; i < 100; i++) { + model.fit() + } + expect(model.llh()).toBeLessThan(llh) + const y = model.predict() + const q = coRankingMatrix(x, y, 30, 20) + expect(q).toBeGreaterThan(0.9) + }) + + test('kernel with params', () => { + const model = new GPLVM(3, 1, 1, 0.005, 0.1, { name: 'gaussian', a: 0.2, b: 1 }) + const x = Matrix.randn(50, 10, 0, Matrix.diag([1.0, 0.1, 1.0, 0.1, 0.1, 0.1, 0.1, 0.1, 1.0, 0.1])).toArray() + + model.init(x) + const llh = model.llh() + for (let i = 0; i < 100; i++) { + model.fit() + } + expect(model.llh()).toBeLessThan(llh) + const y = model.predict() + const q = coRankingMatrix(x, y, 30, 20) + expect(q).toBeGreaterThan(0.9) + }) }) test('reconstruct', () => { diff --git a/tests/lib/model/ocsvm.test.js b/tests/lib/model/ocsvm.test.js index ef1a8dbbf..e70f53ae0 100644 --- a/tests/lib/model/ocsvm.test.js +++ b/tests/lib/model/ocsvm.test.js @@ -5,8 +5,8 @@ import Matrix from '../../../lib/util/matrix.js' import OCSVM from '../../../lib/model/ocsvm.js' describe('anomaly detection', () => { - test('default', () => { - const model = new OCSVM(1, 'gaussian') + test.each(['gaussian', { name: 'gaussian', d: 0.8 }])('%p', kernel => { + 
const model = new OCSVM(1, kernel) const x = Matrix.randn(100, 2, 0, 0.2).toArray() x.push([10, 10]) model.init(x) @@ -25,8 +25,8 @@ describe('anomaly detection', () => { expect(y[y.length - 1]).toBe(true) }) - test('linear', () => { - const model = new OCSVM(1, 'linear') + test.each(['linear', { name: 'linear' }])('%p', kernel => { + const model = new OCSVM(1, kernel) const x = Matrix.randn(100, 2, 0, 0.2).toArray() x.push([-10, -10]) model.init(x) diff --git a/tests/lib/model/pca.test.js b/tests/lib/model/pca.test.js index 620753810..4d062a26d 100644 --- a/tests/lib/model/pca.test.js +++ b/tests/lib/model/pca.test.js @@ -62,40 +62,38 @@ describe('dual', () => { }) }) -describe.each([ - ['gaussian', []], - ['gaussian', [1.0]], - ['polynomial', []], - ['polynomial', [2]], -])('kernel %s %p', (kernel, args) => { - test('project', () => { - const model = new KernelPCA(kernel, args) - const x = Matrix.concat(Matrix.random(20, 5, -2, 2), Matrix.random(20, 5, 5, 8)).toArray() - - model.fit(x) - const y = model.predict(x) - const vari = Matrix.fromArray(y).variance(0) - for (let i = 1; i < vari.cols; i++) { - expect(vari.at(0, i)).toBeLessThanOrEqual(vari.at(0, i - 1)) - } - const q = coRankingMatrix(x, y, 20, 20) - expect(q).toBeGreaterThan(0.9) - }) - - test('reduce', () => { - const model = new KernelPCA(kernel, args) - const x = Matrix.concat(Matrix.random(20, 5, -2, 2), Matrix.random(20, 5, 5, 8)).toArray() - - model.fit(x) - const y = model.predict(x, 5) - const vari = Matrix.fromArray(y).variance(0) - for (let i = 1; i < vari.cols; i++) { - expect(vari.at(0, i)).toBeLessThanOrEqual(vari.at(0, i - 1)) - } - const q = coRankingMatrix(x, y, 20, 20) - expect(q).toBeGreaterThan(0.9) - }) -}) +describe.each(['gaussian', { name: 'gaussian', sigma: 1.0 }, 'polynomial', { name: 'polynomial', n: 2 }])( + 'kernel %s', + kernel => { + test('project', () => { + const model = new KernelPCA(kernel) + const x = Matrix.concat(Matrix.random(20, 5, -2, 2), Matrix.random(20, 5, 5, 
8)).toArray() + + model.fit(x) + const y = model.predict(x) + const vari = Matrix.fromArray(y).variance(0) + for (let i = 1; i < vari.cols; i++) { + expect(vari.at(0, i)).toBeLessThanOrEqual(vari.at(0, i - 1)) + } + const q = coRankingMatrix(x, y, 20, 20) + expect(q).toBeGreaterThan(0.9) + }) + + test('reduce', () => { + const model = new KernelPCA(kernel) + const x = Matrix.concat(Matrix.random(20, 5, -2, 2), Matrix.random(20, 5, 5, 8)).toArray() + + model.fit(x) + const y = model.predict(x, 5) + const vari = Matrix.fromArray(y).variance(0) + for (let i = 1; i < vari.cols; i++) { + expect(vari.at(0, i)).toBeLessThanOrEqual(vari.at(0, i - 1)) + } + const q = coRankingMatrix(x, y, 20, 20) + expect(q).toBeGreaterThan(0.9) + }) + } +) describe('custom kernel', () => { test('project', () => { diff --git a/tests/lib/model/s3vm.test.js b/tests/lib/model/s3vm.test.js index 9bef10d4c..de4587657 100644 --- a/tests/lib/model/s3vm.test.js +++ b/tests/lib/model/s3vm.test.js @@ -7,7 +7,7 @@ import S3VM from '../../../lib/model/s3vm.js' import { accuracy } from '../../../lib/evaluate/classification.js' describe('semi-classifier', () => { - test.each(['gaussian', 'linear'])('kernel %s', kernel => { + test.each(['gaussian', { name: 'gaussian', d: 0.8 }, 'linear', { name: 'linear' }])('kernel %s', kernel => { const model = new S3VM(kernel) const x = Matrix.concat(Matrix.randn(50, 2, 0, 0.2), Matrix.randn(50, 2, 5, 0.2)).toArray() const t = [] diff --git a/tests/lib/model/svc.test.js b/tests/lib/model/svc.test.js index 2fd09f02b..e4ecbb20b 100644 --- a/tests/lib/model/svc.test.js +++ b/tests/lib/model/svc.test.js @@ -7,8 +7,8 @@ import SVC from '../../../lib/model/svc.js' import { randIndex } from '../../../lib/evaluate/clustering.js' describe('clustering', () => { - test('gaussian', () => { - const model = new SVC('gaussian') + test.each(['gaussian', { name: 'gaussian', d: 0.8 }])('%p', kernel => { + const model = new SVC(kernel) const n = 50 const x = Matrix.concat( 
Matrix.concat(Matrix.randn(n, 2, 0, 0.1), Matrix.randn(n, 2, 5, 0.1)), @@ -31,8 +31,8 @@ describe('clustering', () => { expect(ri).toBeGreaterThan(0.8) }) - test('linear', () => { - const model = new SVC('linear') + test.each(['linear', { name: 'linear' }])('%p', kernel => { + const model = new SVC(kernel) const n = 50 const x = Matrix.concat( Matrix.concat(Matrix.randn(n, 2, 0, 0.1), Matrix.randn(n, 2, 5, 0.1)), diff --git a/tests/lib/model/svm.test.js b/tests/lib/model/svm.test.js index e38060612..7ec3bdc65 100644 --- a/tests/lib/model/svm.test.js +++ b/tests/lib/model/svm.test.js @@ -4,27 +4,24 @@ import SVM from '../../../lib/model/svm.js' import { accuracy } from '../../../lib/evaluate/classification.js' describe('classification', () => { - test.each([ - ['gaussian', undefined], - ['gaussian', []], - ['gaussian', [0.2]], - ['linear', undefined], - ['linear', []], - ])('fit %p', (kernel, args) => { - const model = new SVM(kernel, args) - const x = Matrix.concat(Matrix.randn(50, 2, 0, 0.2), Matrix.randn(50, 2, 5, 0.2)).toArray() - const t = [] - for (let i = 0; i < x.length; i++) { - t[i] = Math.floor(i / 50) * 2 - 1 - } - model.init(x, t) - for (let i = 0; i < 100; i++) { - model.fit() + test.each(['gaussian', { name: 'gaussian' }, { name: 'gaussian', d: 0.2 }, 'linear', { name: 'linear' }])( + 'fit %p', + kernel => { + const model = new SVM(kernel) + const x = Matrix.concat(Matrix.randn(50, 2, 0, 0.2), Matrix.randn(50, 2, 5, 0.2)).toArray() + const t = [] + for (let i = 0; i < x.length; i++) { + t[i] = Math.floor(i / 50) * 2 - 1 + } + model.init(x, t) + for (let i = 0; i < 100; i++) { + model.fit() + } + const y = model.predict(x) + const acc = accuracy(y.map(Math.sign), t.map(Math.sign)) + expect(acc).toBeGreaterThan(0.9) } - const y = model.predict(x) - const acc = accuracy(y.map(Math.sign), t.map(Math.sign)) - expect(acc).toBeGreaterThan(0.9) - }) + ) test('custom kernel', () => { const model = new SVM((a, b) => Math.exp(-2 * a.reduce((s, v, i) => s + (v - 
b[i]) ** 2, 0) ** 2)) diff --git a/tests/lib/model/svr.test.js b/tests/lib/model/svr.test.js index 6bc6cd955..4dac82798 100644 --- a/tests/lib/model/svr.test.js +++ b/tests/lib/model/svr.test.js @@ -7,11 +7,8 @@ import SVR from '../../../lib/model/svr.js' import { rmse } from '../../../lib/evaluate/regression.js' describe('regression', () => { - test.each([ - ['gaussian', [2]], - ['linear', []], - ])('kernel %s %p', (kernel, args) => { - const model = new SVR(kernel, args) + test.each([{ name: 'gaussian', d: 2 }, 'linear'])('kernel %s', kernel => { + const model = new SVR(kernel) const x = Matrix.random(50, 2, -2, 2).toArray() const t = [] for (let i = 0; i < x.length; i++) { From bc754c9b96f3c41aa6376ec8b35d6d63fb5fdc7c Mon Sep 17 00:00:00 2001 From: ishii-norimi Date: Wed, 20 Mar 2024 11:49:38 +0900 Subject: [PATCH 2/2] Accept object for kernel argument --- lib/model/bogd.js | 11 +++-- lib/model/bpa.js | 11 +++-- lib/model/bsgd.js | 22 +++++---- lib/model/coll.js | 11 +++-- lib/model/denclue.js | 4 +- lib/model/diffusion_map.js | 4 +- lib/model/forgetron.js | 11 +++-- lib/model/kdeos.js | 13 +++-- lib/model/kernel_density_estimator.js | 7 ++- lib/model/kernelized_pegasos.js | 11 +++-- lib/model/kernelized_perceptron.js | 11 +++-- lib/model/projectron.js | 22 +++++---- lib/model/rdos.js | 4 +- lib/model/ridge.js | 10 ++-- lib/model/rkof.js | 30 +++++++----- lib/model/silk.js | 13 +++-- lib/model/stoptron.js | 11 +++-- lib/model/tightest_perceptron.js | 11 +++-- tests/lib/model/bogd.test.js | 3 +- tests/lib/model/bpa.test.js | 6 +-- tests/lib/model/bsgd.test.js | 12 ++--- tests/lib/model/coll.test.js | 47 ++++++++++--------- tests/lib/model/denclue.test.js | 24 ++++++++++ tests/lib/model/diffusion_map.test.js | 4 +- tests/lib/model/forgetron.test.js | 6 +-- tests/lib/model/kdeos.test.js | 4 +- .../model/kernel_density_estimator.test.js | 16 ++++++- tests/lib/model/kernelized_pegasos.test.js | 31 ++++++------ tests/lib/model/kernelized_perceptron.test.js | 29 
+++++++----- tests/lib/model/projectron.test.js | 12 ++--- tests/lib/model/rdos.test.js | 4 +- tests/lib/model/ridge.test.js | 4 +- tests/lib/model/rkof.test.js | 4 +- tests/lib/model/silk.test.js | 6 ++- tests/lib/model/stoptron.test.js | 29 +++++++----- tests/lib/model/tightest_perceptron.test.js | 4 +- 36 files changed, 285 insertions(+), 177 deletions(-) diff --git a/lib/model/bogd.js b/lib/model/bogd.js index 61016460d..3a720d79d 100644 --- a/lib/model/bogd.js +++ b/lib/model/bogd.js @@ -10,7 +10,7 @@ export default class BOGD { * @param {number} [lambda=0.1] Regularization parameter * @param {number} [gamma=0.1] Maximum coefficient * @param {'uniform' | 'nonuniform'} [sampling=nonuniform] Sampling approach - * @param {'gaussian' | 'polynomial' | function (number[], number[]): number} [kernel=gaussian] Kernel name + * @param {'gaussian' | 'polynomial' | { name: 'gaussian', s?: number } | { name: 'polynomial', d?: number } | function (number[], number[]): number} [kernel=gaussian] Kernel name * @param {'zero_one' | 'hinge'} [loss=hinge] Loss type name */ constructor( @@ -31,14 +31,17 @@ export default class BOGD { if (typeof kernel === 'function') { this._kernel = kernel } else { - switch (kernel) { + if (typeof kernel === 'string') { + kernel = { name: kernel } + } + switch (kernel.name) { case 'gaussian': - this._s = 1 + this._s = kernel.s ?? 1 this._kernel = (a, b) => Math.exp(-(a.reduce((s, v, i) => s + (v - b[i]) ** 2, 0) ** 2) / this._s ** 2) break case 'polynomial': - this._d = 2 + this._d = kernel.d ?? 
2 this._kernel = (a, b) => (1 + a.reduce((s, v, i) => s + v * b[i])) ** this._d break } diff --git a/lib/model/bpa.js b/lib/model/bpa.js index e346cd52e..eac51dc3f 100644 --- a/lib/model/bpa.js +++ b/lib/model/bpa.js @@ -10,7 +10,7 @@ export default class BPA { * @param {number} [c=1] Regularization parameter * @param {number} [b=10] Budget size * @param {'simple' | 'projecting' | 'nn'} [version=simple] Version - * @param {'gaussian' | 'polynomial' | function (number[], number[]): number} [kernel=gaussian] Kernel name + * @param {'gaussian' | 'polynomial' | { name: 'gaussian', s?: number } | { name: 'polynomial', d?: number } | function (number[], number[]): number} [kernel=gaussian] Kernel name */ constructor(c = 1, b = 10, version = 'simple', kernel = 'gaussian') { this._c = c @@ -19,14 +19,17 @@ export default class BPA { if (typeof kernel === 'function') { this._kernel = kernel } else { - switch (kernel) { + if (typeof kernel === 'string') { + kernel = { name: kernel } + } + switch (kernel.name) { case 'gaussian': - this._s = 1 + this._s = kernel.s ?? 1 this._kernel = (a, b) => Math.exp(-(a.reduce((s, v, i) => s + (v - b[i]) ** 2, 0) ** 2) / this._s ** 2) break case 'polynomial': - this._d = 2 + this._d = kernel.d ?? 
2 this._kernel = (a, b) => (1 + a.reduce((s, v, i) => s + v * b[i])) ** this._d break } diff --git a/lib/model/bsgd.js b/lib/model/bsgd.js index 6ecd507cd..f19e6b762 100644 --- a/lib/model/bsgd.js +++ b/lib/model/bsgd.js @@ -11,7 +11,7 @@ export default class BSGD { * @param {number} [eta=1] Learning rate * @param {number} [lambda=1] Regularization parameter * @param {'removal' | 'projection' | 'merging'} [maintenance=removal] Maintenance type - * @param {'gaussian' | 'polynomial' | function (number[], number[]): number} [kernel=gaussian] Kernel name + * @param {'gaussian' | 'polynomial' | { name: 'gaussian', s?: number } | { name: 'polynomial', d?: number } | function (number[], number[]): number} [kernel=gaussian] Kernel name */ constructor(b = 10, eta = 1, lambda = 1, maintenance = 'removal', kernel = 'gaussian') { this._b = b @@ -21,14 +21,17 @@ export default class BSGD { if (typeof kernel === 'function') { this._kernel = kernel } else { - switch (kernel) { + if (typeof kernel === 'string') { + kernel = { name: kernel } + } + switch (kernel.name) { case 'gaussian': - this._s = 1 + this._s = kernel.s ?? 1 this._kernel = (a, b) => Math.exp(-(a.reduce((s, v, i) => s + (v - b[i]) ** 2, 0) ** 2) / this._s ** 2) break case 'polynomial': - this._d = 2 + this._d = kernel.d ?? 
2 this._kernel = (a, b) => (1 + a.reduce((s, v, i) => s + v * b[i])) ** this._d break } @@ -195,7 +198,7 @@ export class MulticlassBSGD { * @param {number} [eta=1] Learning rate * @param {number} [lambda=1] Regularization parameter * @param {'removal' | 'projection' | 'merging'} [maintenance=removal] Maintenance type - * @param {'gaussian' | 'polynomial' | function (number[], number[]): number} [kernel=gaussian] Kernel name + * @param {'gaussian' | 'polynomial' | { name: 'gaussian', s?: number } | { name: 'polynomial', d?: number } | function (number[], number[]): number} [kernel=gaussian] Kernel name */ constructor(b = 10, eta = 1, lambda = 1, maintenance = 'removal', kernel = 'gaussian') { this._b = b @@ -205,14 +208,17 @@ export class MulticlassBSGD { if (typeof kernel === 'function') { this._kernel = kernel } else { - switch (kernel) { + if (typeof kernel === 'string') { + kernel = { name: kernel } + } + switch (kernel.name) { case 'gaussian': - this._s = 1 + this._s = kernel.s ?? 1 this._kernel = (a, b) => Math.exp(-(a.reduce((s, v, i) => s + (v - b[i]) ** 2, 0) ** 2) / this._s ** 2) break case 'polynomial': - this._d = 2 + this._d = kernel.d ?? 
2 this._kernel = (a, b) => (1 + a.reduce((s, v, i) => s + v * b[i])) ** this._d break } diff --git a/lib/model/coll.js b/lib/model/coll.js index d97bf1df8..b61b994d3 100644 --- a/lib/model/coll.js +++ b/lib/model/coll.js @@ -9,7 +9,7 @@ export default class COLL { /** * @param {number} c Number of clusters * @param {number} [eta=1] Initial learning rate - * @param {'gaussian' | 'polynomial' | function (number[], number[]): number} [kernel=gaussian] Kernel name + * @param {'gaussian' | 'polynomial' | { name: 'gaussian', s?: number } | { name: 'polynomial', d?: number } | function (number[], number[]): number} [kernel=gaussian] Kernel name */ constructor(c, eta = 1, kernel = 'gaussian') { this._c = c @@ -17,14 +17,17 @@ export default class COLL { if (typeof kernel === 'function') { this._kernel = kernel } else { - switch (kernel) { + if (typeof kernel === 'string') { + kernel = { name: kernel } + } + switch (kernel.name) { case 'gaussian': - this._s = 1 + this._s = kernel.s ?? 1 this._kernel = (a, b) => Math.exp(-(a.reduce((s, v, i) => s + (v - b[i]) ** 2, 0) ** 2) / this._s ** 2) break case 'polynomial': - this._d = 2 + this._d = kernel.d ?? 
2 this._kernel = (a, b) => (1 + a.reduce((s, v, i) => s + v * b[i])) ** this._d break } diff --git a/lib/model/denclue.js b/lib/model/denclue.js index 79f56f8d5..70de43572 100644 --- a/lib/model/denclue.js +++ b/lib/model/denclue.js @@ -8,7 +8,7 @@ export default class DENCLUE { /** * @param {number} h Smoothing parameter for the kernel * @param {1 | 2} [version=1] Version number - * @param {'gaussian' | function (number[]): number} [kernel=gaussian] Kernel name + * @param {'gaussian' | { name: 'gaussian' } | function (number[]): number} [kernel=gaussian] Kernel name */ constructor(h, version = 1, kernel = 'gaussian') { this._version = version @@ -19,7 +19,7 @@ export default class DENCLUE { if (typeof kernel === 'function') { this._kernel = kernel - } else if (kernel === 'gaussian') { + } else if (kernel === 'gaussian' || kernel.name === 'gaussian') { this._kernel = u => { const d = u.length return Math.exp(-u.reduce((s, v) => s + v ** 2, 0) / 2) / (2 * Math.PI) ** (d / 2) diff --git a/lib/model/diffusion_map.js b/lib/model/diffusion_map.js index 77e5fce6f..98335a201 100644 --- a/lib/model/diffusion_map.js +++ b/lib/model/diffusion_map.js @@ -8,13 +8,13 @@ export default class DiffusionMap { // https://inside.mines.edu/~whereman/papers/delaPorte-Herbst-Hereman-vanderWalt-PRASA-2008.pdf /** * @param {number} t Power parameter - * @param {'gaussian' | function (number[], number[]): number} [kernel=gaussian] Kernel name + * @param {'gaussian' | { name: 'gaussian' } | function (number[], number[]): number} [kernel=gaussian] Kernel name */ constructor(t, kernel = 'gaussian') { this._t = t if (typeof kernel === 'function') { this._k = kernel - } else if (kernel === 'gaussian') { + } else if (kernel === 'gaussian' || kernel.name === 'gaussian') { this._k = (x, y) => Math.exp(-x.reduce((s, v, i) => s + (v - y[i]) ** 2, 0) / 2) } } diff --git a/lib/model/forgetron.js b/lib/model/forgetron.js index 734d16cb6..573e5b873 100644 --- a/lib/model/forgetron.js +++ 
b/lib/model/forgetron.js @@ -9,21 +9,24 @@ export default class Forgetron { // https://github.com/LIBOL/KOL /** * @param {number} b Budget parameter - * @param {'gaussian' | 'polynomial' | function (number[], number[]): number} [kernel=gaussian] Kernel name + * @param {'gaussian' | 'polynomial' | { name: 'gaussian', s?: number } | { name: 'polynomial', d?: number } | function (number[], number[]): number} [kernel=gaussian] Kernel name */ constructor(b, kernel = 'gaussian') { this._b = b if (typeof kernel === 'function') { this._kernel = kernel } else { - switch (kernel) { + if (typeof kernel === 'string') { + kernel = { name: kernel } + } + switch (kernel.name) { case 'gaussian': - this._s = 1 + this._s = kernel.s ?? 1 this._kernel = (a, b) => Math.exp(-(a.reduce((s, v, i) => s + (v - b[i]) ** 2, 0) ** 2) / this._s ** 2) break case 'polynomial': - this._d = 2 + this._d = kernel.d ?? 2 this._kernel = (a, b) => (1 + a.reduce((s, v, i) => s + v * b[i])) ** this._d break } diff --git a/lib/model/kdeos.js b/lib/model/kdeos.js index 962017ade..35b7d67d0 100644 --- a/lib/model/kdeos.js +++ b/lib/model/kdeos.js @@ -7,7 +7,7 @@ export default class KDEOS { /** * @param {number} kmin Minimum number of neighborhoods * @param {number} kmax Maximum number of neighborhoods - * @param {'gaussian' | 'epanechnikov' | function (number, number, number): number} [kernel=gaussian] Kernel name + * @param {'gaussian' | 'epanechnikov' | { name: 'gaussian' } | { name: 'epanechnikov' } | function (number, number, number): number} [kernel=gaussian] Kernel name */ constructor(kmin, kmax, kernel = 'gaussian') { this._kmin = kmin @@ -16,10 +16,15 @@ export default class KDEOS { this._phi = 0.1 if (typeof kernel === 'function') { this._kernel = kernel - } else if (kernel === 'gaussian') { - this._kernel = (u, h, d) => Math.exp(-(u ** 2) / (2 * h ** 2)) / (h * Math.sqrt(2 * Math.PI)) ** d } else { - this._kernel = (u, h, d) => (3 * (1 - (u / h) ** 2)) / (4 * h ** d) + if (typeof kernel === 
'string') { + kernel = { name: kernel } + } + if (kernel.name === 'gaussian') { + this._kernel = (u, h, d) => Math.exp(-(u ** 2) / (2 * h ** 2)) / (h * Math.sqrt(2 * Math.PI)) ** d + } else { + this._kernel = (u, h, d) => (3 * (1 - (u / h) ** 2)) / (4 * h ** d) + } } } diff --git a/lib/model/kernel_density_estimator.js b/lib/model/kernel_density_estimator.js index 07a6fda5c..7b3d14e1c 100644 --- a/lib/model/kernel_density_estimator.js +++ b/lib/model/kernel_density_estimator.js @@ -6,14 +6,17 @@ export default class KernelDensityEstimator { // http://ibis.t.u-tokyo.ac.jp/suzuki/lecture/2015/dataanalysis/L9.pdf /** * @param {number} [h=0] Smoothing parameter for the kernel - * @param {'gaussian' | 'rectangular' | 'triangular' | 'epanechnikov' | 'biweight' | 'triweight' | function (number): number} [kernel=gaussian] Kernel name + * @param {'gaussian' | 'rectangular' | 'triangular' | 'epanechnikov' | 'biweight' | 'triweight' | { name: 'gaussian' } | { name: 'rectangular' } | { name: 'triangular' } | { name: 'epanechnikov' } | { name: 'biweight' } | { name: 'triweight' } | function (number): number} [kernel=gaussian] Kernel name */ constructor(h = 0, kernel = 'gaussian') { this._h = h if (typeof kernel === 'function') { this._kernel = kernel } else { - switch (kernel) { + if (typeof kernel === 'string') { + kernel = { name: kernel } + } + switch (kernel.name) { case 'gaussian': this._kernel = x => Math.exp((-x * x) / 2) / Math.sqrt(2 * Math.PI) break diff --git a/lib/model/kernelized_pegasos.js b/lib/model/kernelized_pegasos.js index 491768b29..365d8effa 100644 --- a/lib/model/kernelized_pegasos.js +++ b/lib/model/kernelized_pegasos.js @@ -7,7 +7,7 @@ export default class KernelizedPegasos { // https://sandipanweb.wordpress.com/2018/04/29/implementing-pegasos-primal-estimated-sub-gradient-solver-for-svm-using-it-for-sentiment-classification-and-switching-to-logistic-regression-objective-by-changing-the-loss-function-in-python/ /** * @param {number} rate Learning rate - 
* @param {'gaussian' | 'polynomial' | function (number[], number[]): number} [kernel=gaussian] Kernel name + * @param {'gaussian' | 'polynomial' | { name: 'gaussian', s?: number } | { name: 'polynomial', d?: number } | function (number[], number[]): number} [kernel=gaussian] Kernel name */ constructor(rate, kernel = 'gaussian') { this._r = rate @@ -15,14 +15,17 @@ export default class KernelizedPegasos { if (typeof kernel === 'function') { this._kernel = kernel } else { - switch (kernel) { + if (typeof kernel === 'string') { + kernel = { name: kernel } + } + switch (kernel.name) { case 'gaussian': - this._s = 1 + this._s = kernel.s ?? 1 this._kernel = (a, b) => Math.exp(-(a.reduce((s, v, i) => s + (v - b[i]) ** 2, 0) ** 2) / this._s ** 2) break case 'polynomial': - this._d = 2 + this._d = kernel.d ?? 2 this._kernel = (a, b) => (1 + a.reduce((s, v, i) => s + v * b[i])) ** this._d break } diff --git a/lib/model/kernelized_perceptron.js b/lib/model/kernelized_perceptron.js index 807f0913b..8e756c1a8 100644 --- a/lib/model/kernelized_perceptron.js +++ b/lib/model/kernelized_perceptron.js @@ -8,21 +8,24 @@ export default class KernelizedPerceptron { // https://cseweb.ucsd.edu/~yfreund/papers/LargeMarginsUsingPerceptron.pdf /** * @param {number} [rate=1] Learning rate - * @param {'gaussian' | 'polynomial' | function (number[], number[]): number} [kernel=gaussian] Kernel name + * @param {'gaussian' | 'polynomial' | { name: 'gaussian', s?: number } | { name: 'polynomial', d?: number } | function (number[], number[]): number} [kernel=gaussian] Kernel name */ constructor(rate = 1, kernel = 'gaussian') { this._r = rate if (typeof kernel === 'function') { this._kernel = kernel } else { - switch (kernel) { + if (typeof kernel === 'string') { + kernel = { name: kernel } + } + switch (kernel.name) { case 'gaussian': - this._s = 1 + this._s = kernel.s ?? 
1 this._kernel = (a, b) => Math.exp(-(a.reduce((s, v, i) => s + (v - b[i]) ** 2, 0) ** 2) / this._s ** 2) break case 'polynomial': - this._d = 2 + this._d = kernel.d ?? 2 this._kernel = (a, b) => (1 + a.reduce((s, v, i) => s + v * b[i])) ** this._d break } diff --git a/lib/model/projectron.js b/lib/model/projectron.js index 082f6ba37..ba5fcc5b6 100644 --- a/lib/model/projectron.js +++ b/lib/model/projectron.js @@ -9,21 +9,24 @@ export class Projectron { // https://github.com/LIBOL/KOL /** * @param {number} [eta=0] Threshold - * @param {'gaussian' | 'polynomial' | function (number[], number[]): number} [kernel=gaussian] Kernel name + * @param {'gaussian' | 'polynomial' | { name: 'gaussian', s?: number } | { name: 'polynomial', d?: number } | function (number[], number[]): number} [kernel=gaussian] Kernel name */ constructor(eta = 0, kernel = 'gaussian') { this._eta = eta if (typeof kernel === 'function') { this._kernel = kernel } else { - switch (kernel) { + if (typeof kernel === 'string') { + kernel = { name: kernel } + } + switch (kernel.name) { case 'gaussian': - this._s = 1 + this._s = kernel.s ?? 1 this._kernel = (a, b) => Math.exp(-(a.reduce((s, v, i) => s + (v - b[i]) ** 2, 0) ** 2) / this._s ** 2) break case 'polynomial': - this._d = 2 + this._d = kernel.d ?? 
2 this._kernel = (a, b) => (1 + a.reduce((s, v, i) => s + v * b[i])) ** this._d break } @@ -128,21 +131,24 @@ export class Projectronpp { // https://github.com/LIBOL/KOL /** * @param {number} [eta=0] Threshold - * @param {'gaussian' | 'polynomial' | function (number[], number[]): number} [kernel=gaussian] Kernel name + * @param {'gaussian' | 'polynomial' | { name: 'gaussian', s?: number } | { name: 'polynomial', d?: number } | function (number[], number[]): number} [kernel=gaussian] Kernel name */ constructor(eta = 0, kernel = 'gaussian') { this._eta = eta if (typeof kernel === 'function') { this._kernel = kernel } else { - switch (kernel) { + if (typeof kernel === 'string') { + kernel = { name: kernel } + } + switch (kernel.name) { case 'gaussian': - this._s = 1 + this._s = kernel.s ?? 1 this._kernel = (a, b) => Math.exp(-(a.reduce((s, v, i) => s + (v - b[i]) ** 2, 0) ** 2) / this._s ** 2) break case 'polynomial': - this._d = 2 + this._d = kernel.d ?? 2 this._kernel = (a, b) => (1 + a.reduce((s, v, i) => s + v * b[i])) ** this._d break } diff --git a/lib/model/rdos.js b/lib/model/rdos.js index 3cc3a425f..100ae2f94 100644 --- a/lib/model/rdos.js +++ b/lib/model/rdos.js @@ -7,14 +7,14 @@ export default class RDOS { /** * @param {number} k Number of neighborhoods * @param {number} h Kernel width - * @param {'gaussian' | function (number[]): number} [kernel=gaussian] Kernel name + * @param {'gaussian' | { name: 'gaussian' } | function (number[]): number} [kernel=gaussian] Kernel name */ constructor(k, h, kernel = 'gaussian') { this._k = k this._h = h if (typeof kernel === 'function') { this._kernel = kernel - } else if (kernel === 'gaussian') { + } else if (kernel === 'gaussian' || kernel.name === 'gaussian') { this._kernel = x => Math.exp(-x.reduce((s, v) => s + v ** 2, 0) / 2) / Math.sqrt(2 * Math.PI) ** x.length } } diff --git a/lib/model/ridge.js b/lib/model/ridge.js index f9ac6892d..19f61c68d 100644 --- a/lib/model/ridge.js +++ b/lib/model/ridge.js @@ -123,7 
+123,7 @@ export class MulticlassRidge { export class KernelRidge { /** * @param {number} [lambda=0.1] Regularization strength - * @param {'gaussian' | function (number[], number[]): number} [kernel=gaussian] Kernel name + * @param {'gaussian' | { name: 'gaussian', s?: number } | function (number[], number[]): number} [kernel=gaussian] Kernel name */ constructor(lambda = 0.1, kernel = 'gaussian') { this._w = null @@ -132,8 +132,12 @@ export class KernelRidge { this._kernel = null if (typeof kernel === 'function') { this._kernel = (a, b) => kernel(a.value, b.value) - } else if (kernel === 'gaussian') { - this._kernel = (x, y, sigma = 1.0) => { + } else { + if (typeof kernel === 'string') { + kernel = { name: kernel } + } + const s = kernel.s ?? 1.0 + this._kernel = (x, y, sigma = s) => { const s = Matrix.sub(x, y).reduce((acc, v) => acc + v * v, 0) return Math.exp(-s / sigma ** 2) } diff --git a/lib/model/rkof.js b/lib/model/rkof.js index 7de111de5..f8ee8f18a 100644 --- a/lib/model/rkof.js +++ b/lib/model/rkof.js @@ -8,7 +8,7 @@ export default class RKOF { * @param {number} k Number of neighborhoods * @param {number} h Smoothing parameter * @param {number} alpha Sensitivity parameter - * @param {'gaussian' | 'epanechnikov' | 'volcano' | function (number[]): number} [kernel=gaussian] Kernel name + * @param {'gaussian' | 'epanechnikov' | 'volcano' | { name: 'gaussian' } | { name: 'epanechnikov' } | { name: 'volcano', beta?: number } | function (number[]): number} [kernel=gaussian] Kernel name */ constructor(k, h, alpha, kernel = 'gaussian') { this._k = k @@ -17,18 +17,24 @@ export default class RKOF { this._s = 1 if (typeof kernel === 'function') { this._kernel = kernel - } else if (kernel === 'gaussian') { - this._kernel = x => Math.exp(-x.reduce((s, v) => s + v ** 2, 0) / 2) / Math.sqrt(2 * Math.PI) ** x.length - } else if (kernel === 'epanechnikov') { - this._kernel = x => { - const s2 = x.reduce((s, v) => s + v ** 2, 0) - return s2 > 1 ? 
0 : (3 / 4) ** x.length * (1 - s2) + } else { + if (typeof kernel === 'string') { + kernel = { name: kernel } } - } else if (kernel === 'volcano') { - this._beta = 1 - this._kernel = x => { - const s2 = x.reduce((s, v) => s + v ** 2, 0) - return s2 <= 1 ? this._beta : this._beta * Math.exp(-s2 + 1) + if (kernel.name === 'gaussian') { + this._kernel = x => + Math.exp(-x.reduce((s, v) => s + v ** 2, 0) / 2) / Math.sqrt(2 * Math.PI) ** x.length + } else if (kernel.name === 'epanechnikov') { + this._kernel = x => { + const s2 = x.reduce((s, v) => s + v ** 2, 0) + return s2 > 1 ? 0 : (3 / 4) ** x.length * (1 - s2) + } + } else if (kernel.name === 'volcano') { + this._beta = kernel.beta ?? 1 + this._kernel = x => { + const s2 = x.reduce((s, v) => s + v ** 2, 0) + return s2 <= 1 ? this._beta : this._beta * Math.exp(-s2 + 1) + } } } } diff --git a/lib/model/silk.js b/lib/model/silk.js index 474bc2bef..0d3dc33db 100644 --- a/lib/model/silk.js +++ b/lib/model/silk.js @@ -8,7 +8,7 @@ export class ILK { * @param {number} [eta=1] Learning rate * @param {number} [lambda=1] Regularization constant * @param {number} [c=1] Penalty imposed on point prediction violations. - * @param {'gaussian' | 'polynomial' | function (number[], number[]): number} [kernel=gaussian] Kernel name + * @param {'gaussian' | 'polynomial' | { name: 'gaussian', s?: number } | { name: 'polynomial', d?: number } | function (number[], number[]): number} [kernel=gaussian] Kernel name * @param {'square' | 'hinge' | 'logistic'} [loss=hinge] Loss type name */ constructor(eta = 1, lambda = 1, c = 1, kernel = 'gaussian', loss = 'hinge') { @@ -18,14 +18,17 @@ export class ILK { if (typeof kernel === 'function') { this._kernel = kernel } else { - switch (kernel) { + if (typeof kernel === 'string') { + kernel = { name: kernel } + } + switch (kernel.name) { case 'gaussian': - this._s = 1 + this._s = kernel.s ?? 
1 this._kernel = (a, b) => Math.exp(-(a.reduce((s, v, i) => s + (v - b[i]) ** 2, 0) ** 2) / this._s ** 2) break case 'polynomial': - this._d = 2 + this._d = kernel.d ?? 2 this._kernel = (a, b) => (1 + a.reduce((s, v, i) => s + v * b[i])) ** this._d break } @@ -140,7 +143,7 @@ export class SILK extends ILK { * @param {number} [lambda=1] Regularization constant * @param {number} [c=1] Penalty imposed on point prediction violations. * @param {number} [w=10] Buffer size - * @param {'gaussian' | 'polynomial' | function (number[], number[]): number} [kernel=gaussian] Kernel name + * @param {'gaussian' | 'polynomial' | { name: 'gaussian', s?: number } | { name: 'polynomial', d?: number } | function (number[], number[]): number} [kernel=gaussian] Kernel name * @param {'square' | 'hinge' | 'graph' | 'logistic'} [loss=hinge] Loss type name */ constructor(eta = 1, lambda = 1, c = 1, w = 10, kernel = 'gaussian', loss = 'hinge') { diff --git a/lib/model/stoptron.js b/lib/model/stoptron.js index 95a345fbd..071aee28f 100644 --- a/lib/model/stoptron.js +++ b/lib/model/stoptron.js @@ -10,21 +10,24 @@ export default class Stoptron { // https://www.jmlr.org/papers/volume13/wang12b/wang12b.pdf /** * @param {number} [n=10] Cachs size - * @param {'gaussian' | 'polynomial' | function (number[], number[]): number} [kernel=gaussian] Kernel name + * @param {'gaussian' | 'polynomial' | { name: 'gaussian', s?: number } | { name: 'polynomial', d?: number } | function (number[], number[]): number} [kernel=gaussian] Kernel name */ constructor(n = 10, kernel = 'gaussian') { this._n = n if (typeof kernel === 'function') { this._kernel = kernel } else { - switch (kernel) { + if (typeof kernel === 'string') { + kernel = { name: kernel } + } + switch (kernel.name) { case 'gaussian': - this._s = 1 + this._s = kernel.s ?? 1 this._kernel = (a, b) => Math.exp(-(a.reduce((s, v, i) => s + (v - b[i]) ** 2, 0) ** 2) / this._s ** 2) break case 'polynomial': - this._d = 2 + this._d = kernel.d ?? 
2 this._kernel = (a, b) => (1 + a.reduce((s, v, i) => s + v * b[i])) ** this._d break } diff --git a/lib/model/tightest_perceptron.js b/lib/model/tightest_perceptron.js index 3c1a6ba97..fa8e7ecf0 100644 --- a/lib/model/tightest_perceptron.js +++ b/lib/model/tightest_perceptron.js @@ -111,7 +111,7 @@ export default class TightestPerceptron { // https://www.dabi.temple.edu/external/vucetic/documents/wang09ijcnn.pdf /** * @param {number} [b=10] Budget size - * @param {'gaussian' | 'polynomial' | function (number[], number[]): number} [kernel=gaussian] Kernel name + * @param {'gaussian' | 'polynomial' | { name: 'gaussian', s?: number } | { name: 'polynomial', d?: number } | function (number[], number[]): number} [kernel=gaussian] Kernel name * @param {'zero_one' | 'hinge'} [accuracyLoss=hinge] Accuracy loss type name */ constructor(b = 10, kernel = 'gaussian', accuracyLoss = 'hinge') { @@ -120,14 +120,17 @@ export default class TightestPerceptron { if (typeof kernel === 'function') { this._kernel = kernel } else { - switch (kernel) { + if (typeof kernel === 'string') { + kernel = { name: kernel } + } + switch (kernel.name) { case 'gaussian': - this._s = 1 + this._s = kernel.s ?? 1 this._kernel = (a, b) => Math.exp(-(a.reduce((s, v, i) => s + (v - b[i]) ** 2, 0) ** 2) / this._s ** 2) break case 'polynomial': - this._d = 2 + this._d = kernel.d ?? 
2 this._kernel = (a, b) => (1 + a.reduce((s, v, i) => s + v * b[i])) ** this._d break } diff --git a/tests/lib/model/bogd.test.js b/tests/lib/model/bogd.test.js index 5fecd5ccc..c85e72ab7 100644 --- a/tests/lib/model/bogd.test.js +++ b/tests/lib/model/bogd.test.js @@ -28,6 +28,7 @@ describe('classification', () => { test.each([ undefined, 'gaussian', + { name: 'gaussian', s: 0.8 }, (a, b) => Math.exp(-(a.reduce((s, v, i) => s + (v - b[i]) ** 2, 0) ** 2)), ])('kernel %s', kernel => { const model = new BOGD(10, 0.1, 0.1, 10, sampling, kernel, loss) @@ -49,7 +50,7 @@ describe('classification', () => { describe.each([undefined, 'uniform', 'nonuniform'])('sampling %s', sampling => { describe.each(['zero_one'])('loss %s', loss => { - test.each(['polynomial'])('kernel %s', kernel => { + test.each(['polynomial', { name: 'polynomial', d: 3 }])('kernel %s', kernel => { const model = new BOGD(10, 0.2, 0.2, 5, sampling, kernel, loss) const n = 50 const x = Matrix.concat(Matrix.randn(n, 2, 0, 0.1), Matrix.randn(n, 2, 5, 0.1)).toArray() diff --git a/tests/lib/model/bpa.test.js b/tests/lib/model/bpa.test.js index 4a05658e3..43fcee873 100644 --- a/tests/lib/model/bpa.test.js +++ b/tests/lib/model/bpa.test.js @@ -23,7 +23,7 @@ describe('classification', () => { }) describe.each([undefined, 'simple', 'projecting', 'nn'])('version %s', version => { - test.each([undefined, 'gaussian'])('kernel %s', kernel => { + test.each([undefined, 'gaussian', { name: 'gaussian', s: 0.8 }])('kernel %s', kernel => { const model = new BPA(1, 10, version, kernel) const x = Matrix.concat(Matrix.randn(50, 2, 0, 0.2), Matrix.randn(50, 2, 5, 0.2)).toArray() const t = [] @@ -56,8 +56,8 @@ describe('classification', () => { }) }) - test('kernel polynomial', () => { - const model = new BPA(1, 10, 'simple', 'polynomial') + test.each(['polynomial', { name: 'polynomial', d: 3 }])('kernel %s', kernel => { + const model = new BPA(1, 10, 'simple', kernel) const x = Matrix.concat(Matrix.randn(50, 2, 0, 0.2), 
Matrix.randn(50, 2, 5, 0.2)).toArray() const t = [] for (let i = 0; i < x.length; i++) { diff --git a/tests/lib/model/bsgd.test.js b/tests/lib/model/bsgd.test.js index d4eb0e797..9d605cede 100644 --- a/tests/lib/model/bsgd.test.js +++ b/tests/lib/model/bsgd.test.js @@ -23,7 +23,7 @@ describe('classification', () => { }) describe.each([undefined, 'removal', 'projection', 'merging'])('maintenance %s', maintenance => { - test.each([undefined, 'gaussian'])('kernel %s', kernel => { + test.each([undefined, 'gaussian', { name: 'gaussian', s: 0.8 }])('kernel %s', kernel => { const model = new BSGD(10, 1, 0.01, maintenance, kernel) const x = Matrix.concat(Matrix.randn(50, 2, 0, 0.2), Matrix.randn(50, 2, 5, 0.2)).toArray() const t = [] @@ -38,8 +38,8 @@ describe('classification', () => { expect(acc).toBeGreaterThan(0.9) }) - test('kernel polynomial', () => { - const model = new BSGD(10, 1, 0.01, maintenance, 'polynomial') + test.each(['polynomial', { name: 'polynomial', d: 3 }])('kernel %s', kernel => { + const model = new BSGD(10, 1, 0.01, maintenance, kernel) const x = Matrix.concat(Matrix.randn(50, 2, 0, 0.2), Matrix.randn(50, 2, 5, 0.2)).toArray() const t = [] for (let i = 0; i < x.length; i++) { @@ -89,7 +89,7 @@ describe('multiclass classification', () => { }) describe.each([undefined, 'removal', 'projection', 'merging'])('maintenance %s', maintenance => { - test.each([undefined, 'gaussian'])('kernel %s', kernel => { + test.each([undefined, 'gaussian', { name: 'gaussian', s: 0.8 }])('kernel %s', kernel => { const model = new MulticlassBSGD(10, 1, 0.01, maintenance, kernel) const x = Matrix.concat(Matrix.randn(50, 2, 0, 0.2), Matrix.randn(50, 2, 5, 0.2)).toArray() const t = [] @@ -104,8 +104,8 @@ describe('multiclass classification', () => { expect(acc).toBeGreaterThan(0.9) }) - test('kernel polynomial', () => { - const model = new MulticlassBSGD(10, 1, 0.01, maintenance, 'polynomial') + test.each(['polynomial', { name: 'polynomial', d: 3 }])('kernel %s', kernel => { + 
const model = new MulticlassBSGD(10, 1, 0.01, maintenance, kernel) const x = Matrix.concat(Matrix.randn(50, 2, 0, 0.2), Matrix.randn(50, 2, 5, 0.2)).toArray() const t = [] for (let i = 0; i < x.length; i++) { diff --git a/tests/lib/model/coll.test.js b/tests/lib/model/coll.test.js index eca7952bc..ea956bfe6 100644 --- a/tests/lib/model/coll.test.js +++ b/tests/lib/model/coll.test.js @@ -4,31 +4,34 @@ import COLL from '../../../lib/model/coll.js' import { randIndex } from '../../../lib/evaluate/clustering.js' describe('clustering', () => { - test.each([undefined, 'gaussian', 'polynomial'])('kernel %p', kernel => { - const model = new COLL(3, undefined, kernel) - const n = 50 - const x = Matrix.concat( - Matrix.concat(Matrix.randn(n, 2, 0, 0.1), Matrix.randn(n, 2, 5, 0.1)), - Matrix.randn(n, 2, [0, 5], 0.1) - ).toArray() + test.each([undefined, 'gaussian', { name: 'gaussian', s: 0.8 }, 'polynomial', { name: 'polynomial', d: 3 }])( + 'kernel %p', + kernel => { + const model = new COLL(3, undefined, kernel) + const n = 50 + const x = Matrix.concat( + Matrix.concat(Matrix.randn(n, 2, 0, 0.1), Matrix.randn(n, 2, 5, 0.1)), + Matrix.randn(n, 2, [0, 5], 0.1) + ).toArray() - model.init(x) - const first_err = model.fit() - for (let i = 0; i < 100; i++) { - model.fit() - } - const last_err = model.fit() - expect(last_err).toBeLessThan(first_err) - const y = model.predict(x) - expect(y).toHaveLength(x.length) + model.init(x) + const first_err = model.fit() + for (let i = 0; i < 100; i++) { + model.fit() + } + const last_err = model.fit() + expect(last_err).toBeLessThan(first_err) + const y = model.predict(x) + expect(y).toHaveLength(x.length) - const t = [] - for (let i = 0; i < x.length; i++) { - t[i] = Math.floor(i / n) + const t = [] + for (let i = 0; i < x.length; i++) { + t[i] = Math.floor(i / n) + } + const ri = randIndex(y, t) + expect(ri).toBeGreaterThan(0.9) } - const ri = randIndex(y, t) - expect(ri).toBeGreaterThan(0.9) - }) + ) test('custom kernel', () => { const 
model = new COLL(3, undefined, (a, b) => diff --git a/tests/lib/model/denclue.test.js b/tests/lib/model/denclue.test.js index 222ea4fc5..3a5d03985 100644 --- a/tests/lib/model/denclue.test.js +++ b/tests/lib/model/denclue.test.js @@ -31,6 +31,30 @@ describe('clustering', () => { expect(ri).toBeGreaterThan(0.9) }) + test.each(['gaussian', { name: 'gaussian' }])('kernel %p', kernel => { + const model = new DENCLUE(0.2, undefined, kernel) + const n = 50 + const x = Matrix.concat( + Matrix.concat(Matrix.randn(n, 2, 0, 0.1), Matrix.randn(n, 2, 5, 0.1)), + Matrix.randn(n, 2, [0, 5], 0.1) + ).toArray() + + model.init(x) + for (let i = 0; i < 100; i++) { + model.fit() + } + expect(model.size).toBeGreaterThanOrEqual(3) + const y = model.predict() + expect(y).toHaveLength(x.length) + + const t = [] + for (let i = 0; i < x.length; i++) { + t[i] = Math.floor(i / n) + } + const ri = randIndex(y, t) + expect(ri).toBeGreaterThan(0.9) + }) + test('custom kernel', () => { const model = new DENCLUE(2, 1, a => (Math.sqrt(a.reduce((s, v) => s + v ** 2, 0)) < 1 ? 
1 : 0)) const n = 50 diff --git a/tests/lib/model/diffusion_map.test.js b/tests/lib/model/diffusion_map.test.js index 9cb3ab2ba..c4f7cdcff 100644 --- a/tests/lib/model/diffusion_map.test.js +++ b/tests/lib/model/diffusion_map.test.js @@ -7,10 +7,10 @@ import DiffusionMap from '../../../lib/model/diffusion_map.js' import { coRankingMatrix } from '../../../lib/evaluate/dimensionality_reduction.js' describe('dimensionality reduction', () => { - test('default', () => { + test.each([undefined, 'gaussian', { name: 'gaussian' }])('kernel %p', kernel => { const x = Matrix.concat(Matrix.random(40, 5, -2, 1), Matrix.random(40, 5, 3, 5)).toArray() - const y = new DiffusionMap(3).predict(x, 2) + const y = new DiffusionMap(3, kernel).predict(x, 2) const q = coRankingMatrix(x, y, 20, 20) expect(q).toBeGreaterThan(0.9) }) diff --git a/tests/lib/model/forgetron.test.js b/tests/lib/model/forgetron.test.js index ba73976ff..e7783c496 100644 --- a/tests/lib/model/forgetron.test.js +++ b/tests/lib/model/forgetron.test.js @@ -7,7 +7,7 @@ import Forgetron from '../../../lib/model/forgetron.js' import { accuracy } from '../../../lib/evaluate/classification.js' describe('classification', () => { - test.each([undefined, 'gaussian'])('kernel %s', kernel => { + test.each([undefined, 'gaussian', { name: 'gaussian', s: 0.8 }])('kernel %s', kernel => { const model = new Forgetron(10, kernel) const s = 2 const x = [] @@ -31,8 +31,8 @@ describe('classification', () => { expect(acc).toBeGreaterThan(0.95) }) - test('kernel polynomial', () => { - const model = new Forgetron(100, 'polynomial') + test.each(['polynomial', { name: 'polynomial', d: 3 }])('kernel %s', kernel => { + const model = new Forgetron(100, kernel) const x = Matrix.concat(Matrix.randn(50, 2, 0, 0.2), Matrix.randn(50, 2, 5, 0.2)).toArray() const t = [] for (let i = 0; i < x.length; i++) { diff --git a/tests/lib/model/kdeos.test.js b/tests/lib/model/kdeos.test.js index a05688cf4..b6129a744 100644 --- a/tests/lib/model/kdeos.test.js 
+++ b/tests/lib/model/kdeos.test.js @@ -5,7 +5,7 @@ import Matrix from '../../../lib/util/matrix.js' import KDEOS from '../../../lib/model/kdeos.js' describe('anomaly detection', () => { - test.each([undefined, 'gaussian'])('kernel %p', kernel => { + test.each([undefined, 'gaussian', { name: 'gaussian' }])('kernel %p', kernel => { const model = new KDEOS(5, 10, kernel) const x = Matrix.randn(100, 2, 0, 0.1).toArray() x.push([10, 10]) @@ -21,7 +21,7 @@ describe('anomaly detection', () => { expect(y[y.length - 1]).toBe(true) }) - test.each(['epanechnikov'])('kernel %p', kernel => { + test.each(['epanechnikov', { name: 'epanechnikov' }])('kernel %p', kernel => { const model = new KDEOS(5, 10, kernel) const x = Matrix.randn(100, 2, 0, 0.1).toArray() const y = model.predict(x) diff --git a/tests/lib/model/kernel_density_estimator.test.js b/tests/lib/model/kernel_density_estimator.test.js index 1c2cc9fef..f93516c3a 100644 --- a/tests/lib/model/kernel_density_estimator.test.js +++ b/tests/lib/model/kernel_density_estimator.test.js @@ -26,7 +26,19 @@ describe('density estimation', () => { expect(corr).toBeGreaterThan(0.9) }) - test.each([undefined, 'gaussian', 'triangular', 'epanechnikov', 'biweight', 'triweight'])('kernel %s', kernel => { + test.each([ + undefined, + 'gaussian', + { name: 'gaussian' }, + 'triangular', + { name: 'triangular' }, + 'epanechnikov', + { name: 'epanechnikov' }, + 'biweight', + { name: 'biweight' }, + 'triweight', + { name: 'triweight' }, + ])('kernel %s', kernel => { const model = new KernelDensityEstimator(0, kernel) const n = 500 const x = Matrix.concat(Matrix.randn(n, 2, 0, 0.1), Matrix.randn(n, 2, 5, 0.1)).toArray() @@ -45,7 +57,7 @@ describe('density estimation', () => { expect(corr).toBeGreaterThan(0.9) }) - test.each(['rectangular'])('kernel %s', kernel => { + test.each(['rectangular', { name: 'rectangular' }])('kernel %s', kernel => { const model = new KernelDensityEstimator(0, kernel) const n = 500 const x = 
Matrix.concat(Matrix.randn(n, 2, 0, 0.1), Matrix.randn(n, 2, 5, 0.1)).toArray() diff --git a/tests/lib/model/kernelized_pegasos.test.js b/tests/lib/model/kernelized_pegasos.test.js index b721ccf14..b02ad0eee 100644 --- a/tests/lib/model/kernelized_pegasos.test.js +++ b/tests/lib/model/kernelized_pegasos.test.js @@ -7,21 +7,24 @@ import KernelizedPegasos from '../../../lib/model/kernelized_pegasos.js' import { accuracy } from '../../../lib/evaluate/classification.js' describe('classification', () => { - test.each([undefined, 'gaussian', 'polynomial'])('kernel %s', kernel => { - const model = new KernelizedPegasos(0.1, kernel) - const x = Matrix.concat(Matrix.randn(50, 2, 0, 0.2), Matrix.randn(50, 2, 5, 0.2)).toArray() - const t = [] - for (let i = 0; i < x.length; i++) { - t[i] = Math.floor(i / 50) * 2 - 1 - } - model.init(x, t) - for (let i = 0; i < 100; i++) { - model.fit() + test.each([undefined, 'gaussian', { name: 'gaussian', s: 0.8 }, 'polynomial', { name: 'polynomial', d: 3 }])( + 'kernel %s', + kernel => { + const model = new KernelizedPegasos(0.1, kernel) + const x = Matrix.concat(Matrix.randn(50, 2, 0, 0.2), Matrix.randn(50, 2, 5, 0.2)).toArray() + const t = [] + for (let i = 0; i < x.length; i++) { + t[i] = Math.floor(i / 50) * 2 - 1 + } + model.init(x, t) + for (let i = 0; i < 100; i++) { + model.fit() + } + const y = model.predict(x) + const acc = accuracy(y, t) + expect(acc).toBeGreaterThan(0.9) } - const y = model.predict(x) - const acc = accuracy(y, t) - expect(acc).toBeGreaterThan(0.9) - }) + ) test('custom kernel', () => { const model = new KernelizedPegasos(0.1, (a, b) => diff --git a/tests/lib/model/kernelized_perceptron.test.js b/tests/lib/model/kernelized_perceptron.test.js index dab48966e..38b509010 100644 --- a/tests/lib/model/kernelized_perceptron.test.js +++ b/tests/lib/model/kernelized_perceptron.test.js @@ -7,20 +7,23 @@ import KernelizedPerceptron from '../../../lib/model/kernelized_perceptron.js' import { accuracy } from 
'../../../lib/evaluate/classification.js' describe('classification', () => { - test.each([undefined, 'gaussian', 'polynomial'])('kernel %s', kernel => { - const model = new KernelizedPerceptron(1, kernel) - const x = Matrix.concat(Matrix.randn(50, 2, 0, 0.2), Matrix.randn(50, 2, 5, 0.2)).toArray() - const t = [] - for (let i = 0; i < x.length; i++) { - t[i] = Math.floor(i / 50) * 2 - 1 + test.each([undefined, 'gaussian', { name: 'gaussian', s: 0.8 }, 'polynomial', { name: 'polynomial', d: 3 }])( + 'kernel %s', + kernel => { + const model = new KernelizedPerceptron(1, kernel) + const x = Matrix.concat(Matrix.randn(50, 2, 0, 0.2), Matrix.randn(50, 2, 5, 0.2)).toArray() + const t = [] + for (let i = 0; i < x.length; i++) { + t[i] = Math.floor(i / 50) * 2 - 1 + } + for (let i = 0; i < 10; i++) { + model.fit(x, t) + } + const y = model.predict(x) + const acc = accuracy(y, t) + expect(acc).toBeGreaterThan(0.9) } - for (let i = 0; i < 10; i++) { - model.fit(x, t) - } - const y = model.predict(x) - const acc = accuracy(y, t) - expect(acc).toBeGreaterThan(0.9) - }) + ) test('custom kernel', () => { const model = new KernelizedPerceptron(undefined, (a, b) => diff --git a/tests/lib/model/projectron.test.js b/tests/lib/model/projectron.test.js index e794b974a..70efbf97a 100644 --- a/tests/lib/model/projectron.test.js +++ b/tests/lib/model/projectron.test.js @@ -31,7 +31,7 @@ describe('projectron', () => { expect(acc).toBeGreaterThan(0.95) }) - test.each([undefined, 'gaussian'])('kernel %s', kernel => { + test.each([undefined, 'gaussian', { name: 'gaussian', s: 0.8 }])('kernel %s', kernel => { const model = new Projectron(0.1, kernel) const s = 5 const x = [] @@ -55,8 +55,8 @@ describe('projectron', () => { expect(acc).toBeGreaterThan(0.95) }) - test('kernel polynomial', () => { - const model = new Projectron(0.1, 'polynomial') + test.each(['polynomial', { name: 'polynomial' }])('kernel %s', kernel => { + const model = new Projectron(0.1, kernel) const x = 
Matrix.concat(Matrix.randn(50, 2, 0, 0.2), Matrix.randn(50, 2, 5, 0.2)).toArray() const t = [] for (let i = 0; i < x.length; i++) { @@ -111,7 +111,7 @@ describe('projectron++', () => { expect(acc).toBeGreaterThan(0.95) }) - test.each([undefined, 'gaussian'])('kernel %s', kernel => { + test.each([undefined, 'gaussian', { name: 'gaussian', s: 0.8 }])('kernel %s', kernel => { const model = new Projectronpp(0.1, kernel) const s = 5 const x = [] @@ -135,8 +135,8 @@ describe('projectron++', () => { expect(acc).toBeGreaterThan(0.95) }) - test('kernel polynomial', () => { - const model = new Projectronpp(0.1, 'polynomial') + test.each(['polynomial', { name: 'polynomial' }])('kernel %s', kernel => { + const model = new Projectronpp(0.1, kernel) const x = Matrix.concat(Matrix.randn(50, 2, 0, 0.2), Matrix.randn(50, 2, 5, 0.2)).toArray() const t = [] for (let i = 0; i < x.length; i++) { diff --git a/tests/lib/model/rdos.test.js b/tests/lib/model/rdos.test.js index 74e1d0ea8..a372f95f9 100644 --- a/tests/lib/model/rdos.test.js +++ b/tests/lib/model/rdos.test.js @@ -5,8 +5,8 @@ import Matrix from '../../../lib/util/matrix.js' import RDOS from '../../../lib/model/rdos.js' describe('anomaly detection', () => { - test('default', () => { - const model = new RDOS(5, 0.5) + test.each([undefined, 'gaussian', { name: 'gaussian' }])('kernel %s', kernel => { + const model = new RDOS(5, 0.5, kernel) const x = Matrix.randn(100, 2, 0, 0.2).toArray() x.push([10, 10]) const threshold = 3 diff --git a/tests/lib/model/ridge.test.js b/tests/lib/model/ridge.test.js index 8494c83cc..c7c63df80 100644 --- a/tests/lib/model/ridge.test.js +++ b/tests/lib/model/ridge.test.js @@ -85,8 +85,8 @@ describe('kernel ridge', () => { expect(model._lambda).toBe(0.1) }) - test('fit', () => { - const model = new KernelRidge(0.01, 'gaussian') + test.each([undefined, 'gaussian', { name: 'gaussian', s: 0.8 }])('fit %s', kernel => { + const model = new KernelRidge(0.01, kernel) const x = Matrix.randn(50, 2, 0, 
5).toArray() const t = [] for (let i = 0; i < x.length; i++) { diff --git a/tests/lib/model/rkof.test.js b/tests/lib/model/rkof.test.js index 87903e363..365fdb884 100644 --- a/tests/lib/model/rkof.test.js +++ b/tests/lib/model/rkof.test.js @@ -5,7 +5,7 @@ import Matrix from '../../../lib/util/matrix.js' import RKOF from '../../../lib/model/rkof.js' describe('anomaly detection', () => { - test.each([undefined, 'gaussian', 'volcano'])('kernel %s', kernel => { + test.each([undefined, 'gaussian', { name: 'gaussian' }, 'volcano', { name: 'volcano' }])('kernel %s', kernel => { const model = new RKOF(5, 0.5, 0.5, kernel) const x = Matrix.randn(100, 2, 0, 0.2).toArray() x.push([10, 10]) @@ -17,7 +17,7 @@ describe('anomaly detection', () => { expect(y[y.length - 1]).toBe(true) }) - test.each(['epanechnikov'])('kernel %s', kernel => { + test.each(['epanechnikov', { name: 'epanechnikov', beta: 0.9 }])('kernel %s', kernel => { const model = new RKOF(5, 0.5, 0.5, kernel) const x = Matrix.randn(100, 2, 0, 0.2).toArray() x.push([10, 10]) diff --git a/tests/lib/model/silk.test.js b/tests/lib/model/silk.test.js index d1fade209..b9901467b 100644 --- a/tests/lib/model/silk.test.js +++ b/tests/lib/model/silk.test.js @@ -26,6 +26,7 @@ describe('ilk classification', () => { test.each([ undefined, 'gaussian', + { name: 'gaussian', s: 0.8 }, (a, b) => Math.exp(-(a.reduce((s, v, i) => s + (v - b[i]) ** 2, 0) ** 2) / 0.01), ])('kernel %s', kernel => { const model = new ILK(1, 1, 1, kernel, loss) @@ -42,7 +43,7 @@ describe('ilk classification', () => { expect(acc).toBeGreaterThan(0.9) }) - test.each(['polynomial'])('kernel %s', kernel => { + test.each(['polynomial', { name: 'polynomial', d: 3 }])('kernel %s', kernel => { const model = new ILK(1, 0.1, 1, kernel, loss) const x = Matrix.concat(Matrix.randn(50, 2, 0, 0.2), Matrix.randn(50, 2, 5, 0.2)).toArray() const t = [] @@ -83,6 +84,7 @@ describe('silk classification', () => { test.each([ undefined, 'gaussian', + { name: 'gaussian', s: 0.8 
}, (a, b) => Math.exp(-(a.reduce((s, v, i) => s + (v - b[i]) ** 2, 0) ** 2) / 0.01), ])('kernel %s', kernel => { const model = new SILK(1, 1, 1, 100, kernel, loss) @@ -99,7 +101,7 @@ describe('silk classification', () => { expect(acc).toBeGreaterThan(0.9) }) - test.each(['polynomial'])('kernel %s', kernel => { + test.each(['polynomial', { name: 'polynomial', d: 3 }])('kernel %s', kernel => { const model = new SILK(1, 0.1, 1, 100, kernel, loss) const x = Matrix.concat(Matrix.randn(50, 2, 0, 0.2), Matrix.randn(50, 2, 5, 0.2)).toArray() const t = [] diff --git a/tests/lib/model/stoptron.test.js b/tests/lib/model/stoptron.test.js index 3faf9edd9..b6930e49c 100644 --- a/tests/lib/model/stoptron.test.js +++ b/tests/lib/model/stoptron.test.js @@ -22,20 +22,23 @@ describe('classification', () => { expect(acc).toBeGreaterThan(0.9) }) - test.each([undefined, 'gaussian', 'polynomial'])('kernel %s', kernel => { - const model = new Stoptron(100, kernel) - const x = Matrix.concat(Matrix.randn(50, 2, 0, 0.2), Matrix.randn(50, 2, 5, 0.2)).toArray() - const t = [] - for (let i = 0; i < x.length; i++) { - t[i] = Math.floor(i / 50) * 2 - 1 + test.each([undefined, 'gaussian', { name: 'gaussian', s: 0.8 }, 'polynomial', { name: 'polynomial', d: 3 }])( + 'kernel %s', + kernel => { + const model = new Stoptron(100, kernel) + const x = Matrix.concat(Matrix.randn(50, 2, 0, 0.2), Matrix.randn(50, 2, 5, 0.2)).toArray() + const t = [] + for (let i = 0; i < x.length; i++) { + t[i] = Math.floor(i / 50) * 2 - 1 + } + for (let i = 0; i < 10; i++) { + model.fit(x, t) + } + const y = model.predict(x) + const acc = accuracy(y, t) + expect(acc).toBeGreaterThan(0.9) } - for (let i = 0; i < 10; i++) { - model.fit(x, t) - } - const y = model.predict(x) - const acc = accuracy(y, t) - expect(acc).toBeGreaterThan(0.9) - }) + ) test('custom kernel', () => { const model = new Stoptron(100, (a, b) => diff --git a/tests/lib/model/tightest_perceptron.test.js b/tests/lib/model/tightest_perceptron.test.js index 
91ae4dfac..92835c782 100644 --- a/tests/lib/model/tightest_perceptron.test.js +++ b/tests/lib/model/tightest_perceptron.test.js @@ -24,7 +24,7 @@ describe('classification', () => { }) describe.each([undefined, 'zero_one', 'hinge'])('accuracyLoss %s', accuracyLoss => { - test.each([undefined, 'gaussian'])('kernel %s', kernel => { + test.each([undefined, 'gaussian', { name: 'gaussian', s: 0.8 }])('kernel %s', kernel => { const model = new TightestPerceptron(10, kernel, accuracyLoss) const x = Matrix.concat(Matrix.randn(50, 2, 0, 0.2), Matrix.randn(50, 2, 5, 0.2)).toArray() x[50] = [0.1, 0.1] @@ -60,7 +60,7 @@ describe('classification', () => { }) }) - test.each(['polynomial'])('kernel %s', kernel => { + test.each(['polynomial', { name: 'polynomial', d: 3 }])('kernel %s', kernel => { const model = new TightestPerceptron(10, kernel) const x = Matrix.concat(Matrix.randn(50, 2, 0, 0.2), Matrix.randn(50, 2, 5, 0.2)).toArray() x[50] = [0.1, 0.1]