From 641406378e2059983f68ed724feab8e43b868b9c Mon Sep 17 00:00:00 2001 From: wackfx Date: Wed, 5 Jul 2023 00:14:20 +0200 Subject: [PATCH 1/2] Reworked from source cloudflare branch feat: reran transpile fix linter feat: final touches + test files squashed 2 commits fix: Polyfills bulk (to please linter) fix: Removed MD5 + put back SHA in the digest() squashed 5 commits fix: cloudflare workers deployment feat: fixed auth fix: encrypt not found in worker :( fix: postgres SASL fix: linting --- cf/polyfills.js | 135 +++++++++++++++++++++++++++---------------- cf/src/connection.js | 8 +-- cf/src/index.js | 4 +- cf/test-pages.js | 22 +++++++ cf/test-worker.js | 15 +++++ package.json | 2 +- transpile.cf.js | 45 +++++++++------ 7 files changed, 155 insertions(+), 76 deletions(-) create mode 100644 cf/test-pages.js create mode 100644 cf/test-worker.js diff --git a/cf/polyfills.js b/cf/polyfills.js index f38a847d..ad922d10 100644 --- a/cf/polyfills.js +++ b/cf/polyfills.js @@ -13,61 +13,69 @@ const IPv4Reg = new RegExp(`^${v4Str}$`) const v6Seg = '(?:[0-9a-fA-F]{1,4})' const IPv6Reg = new RegExp( '^(' + - `(?:${v6Seg}:){7}(?:${v6Seg}|:)|` + - `(?:${v6Seg}:){6}(?:${v4Str}|:${v6Seg}|:)|` + - `(?:${v6Seg}:){5}(?::${v4Str}|(:${v6Seg}){1,2}|:)|` + - `(?:${v6Seg}:){4}(?:(:${v6Seg}){0,1}:${v4Str}|(:${v6Seg}){1,3}|:)|` + - `(?:${v6Seg}:){3}(?:(:${v6Seg}){0,2}:${v4Str}|(:${v6Seg}){1,4}|:)|` + - `(?:${v6Seg}:){2}(?:(:${v6Seg}){0,3}:${v4Str}|(:${v6Seg}){1,5}|:)|` + - `(?:${v6Seg}:){1}(?:(:${v6Seg}){0,4}:${v4Str}|(:${v6Seg}){1,6}|:)|` + - `(?::((?::${v6Seg}){0,5}:${v4Str}|(?::${v6Seg}){1,7}|:))` + - ')(%[0-9a-zA-Z-.:]{1,})?$' + `(?:${v6Seg}:){7}(?:${v6Seg}|:)|` + + `(?:${v6Seg}:){6}(?:${v4Str}|:${v6Seg}|:)|` + + `(?:${v6Seg}:){5}(?::${v4Str}|(:${v6Seg}){1,2}|:)|` + + `(?:${v6Seg}:){4}(?:(:${v6Seg}){0,1}:${v4Str}|(:${v6Seg}){1,3}|:)|` + + `(?:${v6Seg}:){3}(?:(:${v6Seg}){0,2}:${v4Str}|(:${v6Seg}){1,4}|:)|` + + `(?:${v6Seg}:){2}(?:(:${v6Seg}){0,3}:${v4Str}|(:${v6Seg}){1,5}|:)|` + + `(?:${v6Seg}:){1}(?:(:${v6Seg}){0,4}:${v4Str}|(:${v6Seg}){1,6}|:)|` + + `(?::((?::${v6Seg}){0,5}:${v4Str}|(?::${v6Seg}){1,7}|:))` + + ')(%[0-9a-zA-Z-.:]{1,})?$' ) const textEncoder = new TextEncoder() export const crypto = { - randomBytes: l => Crypto.getRandomValues(Buffer.alloc(l)), - pbkdf2Sync: async(password, salt, iterations, keylen) => Crypto.subtle.deriveBits( - { - name: 'PBKDF2', - hash: 'SHA-256', - salt, - iterations - }, - await Crypto.subtle.importKey( - 'raw', - textEncoder.encode(password), - 'PBKDF2', - false, + randomBytes: (l) => Crypto.getRandomValues(Buffer.alloc(l)), + pbkdf2Sync: async (password, salt, iterations, keylen) => + Crypto.subtle.deriveBits( + { + name: 'PBKDF2', + hash: 'SHA-256', + salt, + iterations + }, + await Crypto.subtle.importKey( + 'raw', + textEncoder.encode(password), + 'PBKDF2', + false, + ['deriveBits'] + ), + keylen * 8, ['deriveBits'] ), - keylen * 8, - ['deriveBits'] - ), createHash: (type) => ({ update: (x) => ({ digest: () => { - return type === 'sha256' - ? 
Crypto.subtle.digest('SHA-256', x) - : Crypto.subtle.digest('MD5', x) + if (type !== 'sha256') + throw Error('createHash only supports sha256 on cloudflare.') + if (!(x instanceof Uint8Array)) + x = textEncoder.encode(x) + return Crypto.subtle.digest('SHA-256', x) } }) }), createHmac: (type, key) => ({ - update: x => ({ - digest: async() => Buffer.from(await Crypto.subtle.sign( - 'HMAC', - await Crypto.subtle.importKey('raw', key, { name: 'HMAC', hash: 'SHA-256' }, false, ['sign']), - textEncoder.encode(x) - )) + update: (x) => ({ + digest: async () => + Buffer.from( + await Crypto.subtle.sign( + 'HMAC', + await Crypto.subtle.importKey( + 'raw', + key, + { name: 'HMAC', hash: 'SHA-256' }, + false, + ['sign'] + ), + textEncoder.encode(x) + ) + ) }) }) } -export const process = { - env: {} -} - export const os = { userInfo() { return { username: 'postgres' } @@ -81,19 +89,23 @@ export const fs = { } export const net = { - isIP: x => RegExp.prototype.test.call(IPv4Reg, x) ? 4 : RegExp.prototype.test.call(IPv6Reg, x) ? 6 : 0, + isIP: (x) => + RegExp.prototype.test.call(IPv4Reg, x) + ? 4 + : RegExp.prototype.test.call(IPv6Reg, x) + ? 6 + : 0, Socket } export { setImmediate, clearImmediate } export const tls = { - connect(x) { - const tcp = x.socket + connect({ socket: tcp, servername }) { tcp.writer.releaseLock() tcp.reader.releaseLock() tcp.readyState = 'upgrading' - tcp.raw = tcp.raw.startTls({ servername: x.servername }) + tcp.raw = tcp.raw.startTls({ servername }) tcp.raw.closed.then( () => tcp.emit('close'), (e) => tcp.emit('error', e) @@ -133,7 +145,7 @@ function Socket() { () => { tcp.readyState !== 'upgrade' ? close() - : (tcp.readyState = 'open', tcp.emit('secureConnect')) + : ((tcp.readyState = 'open'), tcp.emit('secureConnect')) }, (e) => tcp.emit('error', e) ) @@ -151,8 +163,7 @@ function Socket() { } function close() { - if (tcp.readyState === 'closed') - return + if (tcp.readyState === 'closed') return tcp.readyState = 'closed' tcp.emit('close') @@ -164,9 +175,7 @@ function Socket() { } function end(data) { - return data - ? tcp.write(data, () => tcp.raw.close()) - : tcp.raw.close() + return data ? 
tcp.write(data, () => tcp.raw.close()) : tcp.raw.close() } function destroy() { @@ -178,7 +187,7 @@ function Socket() { try { let done , value - while (({ done, value } = await tcp.reader.read(), !done)) + while ((({ done, value } = await tcp.reader.read()), !done)) tcp.emit('data', Buffer.from(value)) } catch (err) { error(err) @@ -211,3 +220,31 @@ function setImmediate(fn) { function clearImmediate(id) { tasks.delete(id) } + +const nowOffset = Date.now() +const now = () => Date.now() - nowOffset +const hrtime = (previousTimestamp) => { + const baseNow = Math.floor((Date.now() - now()) * 1e-3) + const clocktime = now() * 1e-3 + let seconds = Math.floor(clocktime) + baseNow + let nanoseconds = Math.floor((clocktime % 1) * 1e9) + + if (previousTimestamp) { + seconds = seconds - previousTimestamp[0] + nanoseconds = nanoseconds - previousTimestamp[1] + if (nanoseconds < 0) { + seconds-- + nanoseconds += 1e9 + } + } + return [seconds, nanoseconds] +} +hrtime.bigint = () => { + const time = hrtime() + return BigInt(`${time[0]}${time[1]}`) +} + +export const process = { + env: {}, + hrtime +} diff --git a/cf/src/connection.js b/cf/src/connection.js index 2a1414a6..ce937a17 100644 --- a/cf/src/connection.js +++ b/cf/src/connection.js @@ -1,9 +1,5 @@ -import { process } from '../polyfills.js' import { Buffer } from 'node:buffer' -import { setImmediate, clearImmediate } from '../polyfills.js' -import { net } from '../polyfills.js' -import { tls } from '../polyfills.js' -import { crypto } from '../polyfills.js' +import { setImmediate, clearImmediate, process, net, tls, crypto } from '../polyfills.js' import Stream from 'node:stream' import { stringify, handleValue, arrayParser, arraySerializer } from './types.js' @@ -131,7 +127,7 @@ function Connection(options, queues = {}, { onopen = noop, onend = noop, onclose try { x = options.socket ? (await Promise.resolve(options.socket(options))) - : net.Socket() + : await net.Socket() } catch (e) { error(e) return diff --git a/cf/src/index.js b/cf/src/index.js index da4df290..3dfbdb62 100644 --- a/cf/src/index.js +++ b/cf/src/index.js @@ -1,6 +1,4 @@ -import { process } from '../polyfills.js' -import { os } from '../polyfills.js' -import { fs } from '../polyfills.js' +import { process, os, fs } from '../polyfills.js' import { mergeUserTypes, diff --git a/cf/test-pages.js b/cf/test-pages.js new file mode 100644 index 00000000..43220e56 --- /dev/null +++ b/cf/test-pages.js @@ -0,0 +1,22 @@ +// Add your database url and run this file with +// npx wrangler pages dev ./cf --script-path test-pages.js --compatibility-date=2023-06-20 --log-level=debug --compatibility-flag=nodejs_compat +import postgres from './src/index' +const DATABASE_URL = '' + +export default { + async fetch(request, env) { + const url = new URL(request.url); + if (url.pathname.includes('/favicon.ico')) { + return new Response('') + } + if (url.pathname.startsWith('/')) { + const sql = postgres(DATABASE_URL) + const rows = await sql`SELECT table_name FROM information_schema.columns` + return new Response(rows.map((e) => e.table_name).join('\n')) + } + + // Otherwise, serve the static assets. + // Without this, the Worker will error and no assets will be served. 
+ return env.ASSETS.fetch(request) + }, +} \ No newline at end of file diff --git a/cf/test-worker.js b/cf/test-worker.js new file mode 100644 index 00000000..15ea8f78 --- /dev/null +++ b/cf/test-worker.js @@ -0,0 +1,15 @@ +// Add your database url and run this file with +// npx wrangler dev ./cf/test-worker.js --compatibility-date=2023-06-20 --log-level=debug --compatibility-flag=nodejs_compat +import postgres from './src/index' +const DATABASE_URL = '' + +export default { + async fetch(request, env, ctx) { + if (request.url.includes('/favicon.ico')) + return new Response() + + const sql = postgres(DATABASE_URL) + const rows = await sql`SELECT table_name FROM information_schema.columns` + return new Response(rows.map((e) => e.table_name).join('\n')) + }, +} \ No newline at end of file diff --git a/package.json b/package.json index 1da5af6d..8efeef12 100644 --- a/package.json +++ b/package.json @@ -56,4 +56,4 @@ "pg", "database" ] -} +} \ No newline at end of file diff --git a/transpile.cf.js b/transpile.cf.js index cdf211fb..34b9f62b 100644 --- a/transpile.cf.js +++ b/transpile.cf.js @@ -1,14 +1,15 @@ import fs from 'fs' import path from 'path' -const empty = x => fs.readdirSync(x).forEach(f => fs.unlinkSync(path.join(x, f))) - , ensureEmpty = x => !fs.existsSync(x) ? fs.mkdirSync(x) : empty(x) +const empty = (x) => + fs.readdirSync(x).forEach((f) => fs.unlinkSync(path.join(x, f))) + , ensureEmpty = (x) => (!fs.existsSync(x) ? fs.mkdirSync(x) : empty(x)) , root = 'cf' , src = path.join(root, 'src') ensureEmpty(src) -fs.readdirSync('src').forEach(name => +fs.readdirSync('src').forEach((name) => fs.writeFileSync( path.join(src, name), transpile(fs.readFileSync(path.join('src', name), 'utf8'), name, 'src') @@ -16,23 +17,33 @@ fs.readdirSync('src').forEach(name => ) function transpile(x) { - const timers = x.includes('setImmediate') - ? 'import { setImmediate, clearImmediate } from \'../polyfills.js\'\n' - : '' - - const process = x.includes('process.') - ? 'import { process } from \'../polyfills.js\'\n' - : '' + const polyfills = [ + x.includes('setImmediate') ? ['setImmediate', 'clearImmediate'] : undefined, + x.includes('process') ? ['process'] : undefined, + x.includes('import net from \'net\'') ? ['net'] : undefined, + x.includes('import tls from \'tls\'') ? ['tls'] : undefined, + x.includes('import crypto from \'crypto\'') ? ['crypto'] : undefined, + x.includes('import os from \'os\'') ? ['os'] : undefined, + x.includes('import fs from \'fs\'') ? ['fs'] : undefined + ].filter(Boolean).flat() const buffer = x.includes('Buffer') ? 'import { Buffer } from \'node:buffer\'\n' : '' - return process + buffer + timers + x - .replace('import net from \'net\'', 'import { net } from \'../polyfills.js\'') - .replace('import tls from \'tls\'', 'import { tls } from \'../polyfills.js\'') - .replace('import crypto from \'crypto\'', 'import { crypto } from \'../polyfills.js\'') - .replace('import os from \'os\'', 'import { os } from \'../polyfills.js\'') - .replace('import fs from \'fs\'', 'import { fs } from \'../polyfills.js\'') - .replace(/ from '([a-z_]+)'/g, ' from \'node:$1\'') + return ( + buffer + + // bulk add polyfills + (polyfills.length ? 
`import { ${polyfills.join(', ')} } from '../polyfills.js'\n` : '') + + x + .replace(': net.Socket()', ': await net.Socket()') + .replace('import Stream from \'stream\'', 'import Stream from \'node:stream\'') + // cleanup polyfills + .replace('import crypto from \'crypto\'\n', '') + .replace('import net from \'net\'\n', '') + .replace('import tls from \'tls\'\n', '') + .replace('import os from \'os\'\n', '') + .replace('import fs from \'fs\'\n', '') + .replace(/ from '([a-z_]+)'/g, ' from \'node:$1\'') + ) } From 4dd6874697365bc316df61c33a2adf6db026bd24 Mon Sep 17 00:00:00 2001 From: wackfx Date: Wed, 5 Jul 2023 00:21:47 +0200 Subject: [PATCH 2/2] fix: merge cleanup --- cf/src/connection.js | 2 +- transpile.cf.js | 2 -- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/cf/src/connection.js b/cf/src/connection.js index ce937a17..17c66a50 100644 --- a/cf/src/connection.js +++ b/cf/src/connection.js @@ -127,7 +127,7 @@ function Connection(options, queues = {}, { onopen = noop, onend = noop, onclose try { x = options.socket ? (await Promise.resolve(options.socket(options))) - : await net.Socket() + : net.Socket() } catch (e) { error(e) return diff --git a/transpile.cf.js b/transpile.cf.js index 34b9f62b..10d3bdff 100644 --- a/transpile.cf.js +++ b/transpile.cf.js @@ -36,8 +36,6 @@ function transpile(x) { // bulk add polyfills (polyfills.length ? `import { ${polyfills.join(', ')} } from '../polyfills.js'\n` : '') + x - .replace(': net.Socket()', ': await net.Socket()') - .replace('import Stream from \'stream\'', 'import Stream from \'node:stream\'') // cleanup polyfills .replace('import crypto from \'crypto\'\n', '') .replace('import net from \'net\'\n', '')
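
A few usage notes on the polyfills and tooling introduced above. First, createHash: the Workers polyfill now refuses anything other than 'sha256' (MD5 was dropped, per the commit notes) and returns the result of Crypto.subtle.digest, i.e. a Promise resolving to an ArrayBuffer rather than Node's synchronous Buffer, so callers in the transpiled sources have to await it and wrap it where byte access is needed. A minimal sketch of the difference, assuming a file that sits next to cf/polyfills.js:

    import { Buffer } from 'node:buffer'
    import { crypto } from './polyfills.js'

    // Workers polyfill: asynchronous, and throws for any algorithm other than sha256.
    async function sha256(data) {
      return Buffer.from(await crypto.createHash('sha256').update(data).digest())
    }
    // Node equivalent, for comparison only:
    // crypto.createHash('sha256').update(data).digest() returns a Buffer synchronously.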
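
pbkdf2Sync keeps its Node name so the transpiled connection.js compiles unchanged, but it is likewise asynchronous on Workers and resolves to an ArrayBuffer of keylen bytes via WebCrypto deriveBits, which is presumably what the "fix: postgres SASL" commit above is about. A sketch of deriving the SCRAM-SHA-256 salted password with it; the helper name and the 32-byte length are illustrative, not part of the patch:

    import { Buffer } from 'node:buffer'
    import { crypto } from './polyfills.js'

    // Hypothetical helper: derive the 32-byte salted password SCRAM-SHA-256 needs.
    // Note the await: despite the name, the polyfilled pbkdf2Sync returns a Promise.
    async function saltedPassword(password, saltBytes, iterations) {
      return Buffer.from(await crypto.pbkdf2Sync(password, saltBytes, iterations, 32))
    }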
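
The tls.connect polyfill upgrades an already-open cloudflare:sockets connection in place: it releases the reader/writer locks, calls startTls({ servername }) on the raw socket, re-attaches the streams and eventually emits 'secureConnect'. A rough sketch of that flow, assuming the polyfilled Socket is driven through the same events the transpiled connection.js uses; the host, port and wiring here are illustrative only:

    import { net, tls } from './polyfills.js'

    const socket = net.Socket()
    socket.on('connect', () =>
      // Hand the same socket back to the polyfill; it upgrades it via startTls().
      tls.connect({ socket, servername: 'db.example.com' })
    )
    socket.on('secureConnect', () => {
      // Encrypted channel ready; protocol traffic continues on `socket`.
    })
    socket.on('error', (e) => console.error(e))
    socket.connect(5432, 'db.example.com')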
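
The new process.hrtime polyfill reproduces Node's [seconds, nanoseconds] tuple API, including the relative form and hrtime.bigint(), but it is built on Date.now(), so resolution is milliseconds at best, and bigint() assembles its value by string-concatenating the tuple rather than computing true nanoseconds. Intended usage, for reference:

    import { process } from './polyfills.js'

    const start = process.hrtime()
    // ... work being timed ...
    const [seconds, nanoseconds] = process.hrtime(start) // elapsed since `start`
    const approx = process.hrtime.bigint()               // BigInt, millisecond accuracy at best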
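
With the reworked transpile.cf.js, every file under src/ is scanned for the Node built-ins and globals it actually references; the original 'net', 'tls', 'crypto', 'os' and 'fs' imports are stripped and a single bulk import from ../polyfills.js is emitted in their place, preceded by a node:buffer import when the file uses Buffer. For cf/src/connection.js that produces exactly the header visible earlier in this patch:

    import { Buffer } from 'node:buffer'
    import { setImmediate, clearImmediate, process, net, tls, crypto } from '../polyfills.js'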
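
Finally, cf/test-worker.js and cf/test-pages.js hard-code DATABASE_URL as an empty constant to be filled in before running the wrangler commands quoted in their headers. A slightly more deployment-shaped sketch reads the URL from a DATABASE_URL secret or var binding instead; that binding name and its wrangler.toml entry are assumptions, not part of this patch:

    // npx wrangler dev ./cf/example-worker.js --compatibility-date=2023-06-20 --compatibility-flag=nodejs_compat
    import postgres from './src/index'

    export default {
      async fetch(request, env) {
        // env.DATABASE_URL is a hypothetical binding; the bundled test files hard-code the URL.
        const sql = postgres(env.DATABASE_URL)
        const rows = await sql`SELECT table_name FROM information_schema.columns`
        return new Response(rows.map((r) => r.table_name).join('\n'))
      }
    }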