diff --git a/.github/workflows/node-test.yml b/.github/workflows/node-test.yml index 91599b68f26..0c71e6a54d6 100644 --- a/.github/workflows/node-test.yml +++ b/.github/workflows/node-test.yml @@ -40,6 +40,7 @@ jobs: - 10.x - 12.x - 14.x + - 16.x steps: - uses: actions/checkout@v2 - uses: actions/setup-node@v2 @@ -123,3 +124,21 @@ jobs: node-version: ${{ matrix.node-version }} - run: npm install --package-lock-only - run: "git diff --exit-code -- package-lock.json || (echo 'Error: package-lock.json is changed during npm install! Please make sure to use npm >= 6.9.0 and commit package-lock.json.' && false)" + + check-json-schema: + runs-on: ubuntu-latest + + strategy: + matrix: + node-version: + - 12.x + + steps: + - uses: actions/checkout@v2 + - name: Use Node.js ${{ matrix.node-version }} + uses: actions/setup-node@v2 + with: + node-version: ${{ matrix.node-version }} + - run: npm install + - run: npm run generate:json-schema + - run: "git diff --exit-code -- schema/*.json || (echo 'Error: JSON schema is changed! Please run npm run generate:json-schema and commit the results.' && false)" diff --git a/CHANGELOG.md b/CHANGELOG.md index 8d3dba4a76e..e69de29bb2d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1 +0,0 @@ -- Add Warsaw (europe-central2) Cloud Function Location to Firebase Extension template. diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 7ce9c273247..92eb719f4cb 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -53,6 +53,7 @@ repository: ```bash git clone git@github.com:firebase/firebase-tools.git cd firebase-tools +npm install # must be run the first time you clone npm link # installs dependencies, runs a build, links it into the environment ``` diff --git a/package-lock.json b/package-lock.json index cf5e716e7ee..6697ff01880 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,6 +1,6 @@ { "name": "firebase-tools", - "version": "9.12.1", + "version": "9.14.0", "lockfileVersion": 1, "requires": true, "dependencies": { @@ -2274,11 +2274,11 @@ } }, "ajv": { - "version": "6.10.2", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.10.2.tgz", - "integrity": "sha512-TXtUUEYHuaTEbLZWIKUr5pmBuhDLy+8KYtPYdcV8qC+pOZL+NKqYwvWSRrVXHn+ZmRRAu8vJTAznH7Oag6RVRw==", + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", "requires": { - "fast-deep-equal": "^2.0.1", + "fast-deep-equal": "^3.1.1", "fast-json-stable-stringify": "^2.0.0", "json-schema-traverse": "^0.4.1", "uri-js": "^4.2.2" @@ -3961,6 +3961,18 @@ "v8-compile-cache": "^2.0.3" }, "dependencies": { + "ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "dev": true, + "requires": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + } + }, "ansi-regex": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.0.tgz", @@ -4489,9 +4501,9 @@ "integrity": "sha1-lpGEQOMEGnpBT4xS48V06zw+HgU=" }, "fast-deep-equal": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-2.0.1.tgz", - "integrity": "sha1-ewUhjd+WZ79/Nwv3/bLLFf3Qqkk=" + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": 
"sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==" }, "fast-diff": { "version": "1.2.0", @@ -4513,15 +4525,6 @@ "picomatch": "^2.2.1" }, "dependencies": { - "glob-parent": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.1.tgz", - "integrity": "sha512-FnI+VGOpnlGHWZxthPGR+QhR78fuiK0sNLkHQv+bL9fQi57lNNdquIbna/WrfROrolq8GK5Ek6BiMwqL/voRYQ==", - "dev": true, - "requires": { - "is-glob": "^4.0.1" - } - }, "picomatch": { "version": "2.2.2", "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.2.2.tgz", @@ -5244,9 +5247,9 @@ } }, "glob-parent": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.0.0.tgz", - "integrity": "sha512-Z2RwiujPRGluePM6j699ktJYxmPpJKCfpGA13jz2hmFZC7gKetzrWvg5KN3+OsIFmydGyZ1AVwERCq1w/ZZwRg==", + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", "requires": { "is-glob": "^4.0.1" } @@ -5497,6 +5500,19 @@ "requires": { "ajv": "^6.5.5", "har-schema": "^2.0.0" + }, + "dependencies": { + "ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "requires": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + } + } } }, "hard-rejection": { @@ -6153,6 +6169,15 @@ "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==" }, + "json-stable-stringify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/json-stable-stringify/-/json-stable-stringify-1.0.1.tgz", + "integrity": "sha1-mnWdOcXy/1A/1TAGRu1EX4jE+a8=", + "dev": true, + "requires": { + "jsonify": "~0.0.0" + } + }, "json-stable-stringify-without-jsonify": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", @@ -6191,6 +6216,12 @@ "graceful-fs": "^4.1.6" } }, + "jsonify": { + "version": "0.0.0", + "resolved": "https://registry.npmjs.org/jsonify/-/jsonify-0.0.0.tgz", + "integrity": "sha1-LHS27kHZPKUbe1qu6PUDYx0lKnM=", + "dev": true + }, "jsonparse": { "version": "1.3.1", "resolved": "https://registry.npmjs.org/jsonparse/-/jsonparse-1.3.1.tgz", @@ -6950,15 +6981,6 @@ "path-is-absolute": "^1.0.0" } }, - "glob-parent": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.1.tgz", - "integrity": "sha512-FnI+VGOpnlGHWZxthPGR+QhR78fuiK0sNLkHQv+bL9fQi57lNNdquIbna/WrfROrolq8GK5Ek6BiMwqL/voRYQ==", - "dev": true, - "requires": { - "is-glob": "^4.0.1" - } - }, "has-flag": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", @@ -7406,9 +7428,9 @@ "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==" }, "normalize-url": { - "version": "4.5.0", - "resolved": "https://registry.npmjs.org/normalize-url/-/normalize-url-4.5.0.tgz", - "integrity": "sha512-2s47yzUxdexf1OhyRi4Em83iQk0aPvwTddtFz4hnSSw9dCEsLEGf6SwIO8ss/19S9iBb5sJaOuTvTGDeZI00BQ==" + "version": "4.5.1", + "resolved": 
"https://registry.npmjs.org/normalize-url/-/normalize-url-4.5.1.tgz", + "integrity": "sha512-9UZCFRHQdNrfTpGg8+1INIg93B6zE0aXMVFkw1WFwvO4SlZywU6aLg5Of0Ap/PgcbSw4LNxvMWXMeugwMCX0AA==" }, "npmlog": { "version": "4.1.2", @@ -10129,9 +10151,9 @@ "integrity": "sha1-cXuPIgzAu3tE5AUUwisui7xw2Lk=" }, "trim-newlines": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/trim-newlines/-/trim-newlines-3.0.0.tgz", - "integrity": "sha512-C4+gOpvmxaSMKuEf9Qc134F1ZuOHVXKRbtEflf4NTtuuJDEIJ9p5PXsalL8SkeRw+qit1Mo+yuvMPAKwWg/1hA==", + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/trim-newlines/-/trim-newlines-3.0.1.tgz", + "integrity": "sha512-c1PTsA3tYrIsLGkJkzHF+w9F2EyxfXGo4UyJc4pFL++FMjnq0HJS69T3M7d//gKrFKwy429bouPescbjecU+Zw==", "dev": true }, "triple-beam": { @@ -10273,6 +10295,166 @@ "integrity": "sha512-hSAifV3k+i6lEoCJ2k6R2Z/rp/H3+8sdmcn5NrS3/3kE7+RyZXm9aqvxWqjEXHAd8b0pShatpcdMTvEdvAJltQ==", "dev": true }, + "typescript-json-schema": { + "version": "0.50.1", + "resolved": "https://registry.npmjs.org/typescript-json-schema/-/typescript-json-schema-0.50.1.tgz", + "integrity": "sha512-GCof/SDoiTDl0qzPonNEV4CHyCsZEIIf+mZtlrjoD8vURCcEzEfa2deRuxYid8Znp/e27eDR7Cjg8jgGrimBCA==", + "dev": true, + "requires": { + "@types/json-schema": "^7.0.7", + "@types/node": "^14.14.33", + "glob": "^7.1.6", + "json-stable-stringify": "^1.0.1", + "ts-node": "^9.1.1", + "typescript": "~4.2.3", + "yargs": "^16.2.0" + }, + "dependencies": { + "@types/json-schema": { + "version": "7.0.7", + "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.7.tgz", + "integrity": "sha512-cxWFQVseBm6O9Gbw1IWb8r6OS4OhSt3hPZLkFApLjM8TEXROBuQGLAH2i2gZpcXdLBIrpXuTDhH7Vbm1iXmNGA==", + "dev": true + }, + "@types/node": { + "version": "14.17.3", + "resolved": "https://registry.npmjs.org/@types/node/-/node-14.17.3.tgz", + "integrity": "sha512-e6ZowgGJmTuXa3GyaPbTGxX17tnThl2aSSizrFthQ7m9uLGZBXiGhgE55cjRZTF5kjZvYn9EOPOMljdjwbflxw==", + "dev": true + }, + "ansi-regex": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.0.tgz", + "integrity": "sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg==", + "dev": true + }, + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "requires": { + "color-convert": "^2.0.1" + } + }, + "cliui": { + "version": "7.0.4", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz", + "integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==", + "dev": true, + "requires": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.0", + "wrap-ansi": "^7.0.0" + } + }, + "color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + 
"integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, + "glob": { + "version": "7.1.7", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.7.tgz", + "integrity": "sha512-OvD9ENzPLbegENnYP5UUfJIirTg4+XwMWGaQfQTY0JenxNvvIKP3U3/tAQSPIu/lHxXYSZmpXlUHeqAIdKzBLQ==", + "dev": true, + "requires": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + } + }, + "is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true + }, + "string-width": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.2.tgz", + "integrity": "sha512-XBJbT3N4JhVumXE0eoLU9DCjcaF92KLNqTmFCnG1pf8duUxFGwtP6AD6nkjw9a3IdiRtL3E2w3JDiE/xi3vOeA==", + "dev": true, + "requires": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.0" + } + }, + "strip-ansi": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz", + "integrity": "sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==", + "dev": true, + "requires": { + "ansi-regex": "^5.0.0" + } + }, + "typescript": { + "version": "4.2.4", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.2.4.tgz", + "integrity": "sha512-V+evlYHZnQkaz8TRBuxTA92yZBPotr5H+WhQ7bD3hZUndx5tGOa1fuCgeSjxAzM1RiN5IzvadIXTVefuuwZCRg==", + "dev": true + }, + "wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "requires": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + } + }, + "y18n": { + "version": "5.0.8", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", + "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", + "dev": true + }, + "yargs": { + "version": "16.2.0", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz", + "integrity": "sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==", + "dev": true, + "requires": { + "cliui": "^7.0.2", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.0", + "y18n": "^5.0.5", + "yargs-parser": "^20.2.2" + } + }, + "yargs-parser": { + "version": "20.2.7", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.7.tgz", + "integrity": "sha512-FiNkvbeHzB/syOjIUxFDCnhSfzAL8R5vs40MgLFBorXACCOAEaWu0gRZl14vG8MR9AOJIZbmkjhusqBYZ3HTHw==", + "dev": true + } + } + }, "unbzip2-stream": { "version": "1.4.3", "resolved": "https://registry.npmjs.org/unbzip2-stream/-/unbzip2-stream-1.4.3.tgz", @@ -10791,9 +10973,9 @@ } }, "ws": { - "version": "7.2.3", - "resolved": "https://registry.npmjs.org/ws/-/ws-7.2.3.tgz", - "integrity": "sha512-HTDl9G9hbkNDk98naoR/cHDws7+EyYMOdL1BmjsZXRUjf7d+MficC4B7HLUPlSiho0vg+CWKrGIt/VJBd1xunQ==" + "version": "7.4.6", + "resolved": "https://registry.npmjs.org/ws/-/ws-7.4.6.tgz", + "integrity": 
"sha512-YmhHDO4MzaDLB+M9ym/mDA5z0naX8j7SIlT8f8z+I0VtzsRbekxEutHSme7NPS2qE8StCYQNUnfWdXta/Yu85A==" }, "xdg-basedir": { "version": "4.0.0", diff --git a/package.json b/package.json index 52019bdf20f..6a06c3677ab 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "firebase-tools", - "version": "9.12.1", + "version": "9.14.0", "description": "Command-Line Interface for Firebase", "main": "./lib/index.js", "bin": { @@ -14,6 +14,7 @@ "format:other": "npm run lint:other -- --write", "format:ts": "npm run lint:ts -- --fix --quiet", "generate:auth-api": "ts-node scripts/gen-auth-api-spec.ts", + "generate:json-schema": "typescript-json-schema --strictNullChecks --required --noExtraProps src/firebaseConfig.ts FirebaseConfig > schema/firebase-config.json", "lint": "npm run lint:ts && npm run lint:other", "lint:changed-files": "ts-node ./scripts/lint-changed-files.ts", "lint:other": "prettier --check '**/*.{md,yaml,yml}'", @@ -85,6 +86,7 @@ "@types/archiver": "^5.1.0", "JSONStream": "^1.2.1", "abort-controller": "^3.0.0", + "ajv": "^6.12.6", "archiver": "^5.0.0", "body-parser": "^1.19.0", "chokidar": "^3.0.2", @@ -205,6 +207,7 @@ "supertest": "^3.3.0", "swagger2openapi": "^6.0.3", "ts-node": "^9.1.1", - "typescript": "^3.9.5" + "typescript": "^3.9.5", + "typescript-json-schema": "^0.50.1" } } diff --git a/schema/firebase-config.json b/schema/firebase-config.json new file mode 100644 index 00000000000..e630196c201 --- /dev/null +++ b/schema/firebase-config.json @@ -0,0 +1,393 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "additionalProperties": false, + "properties": { + "database": { + "anyOf": [ + { + "additionalProperties": false, + "properties": { + "rules": { + "type": "string" + } + }, + "type": "object" + }, + { + "items": { + "additionalProperties": false, + "properties": { + "instance": { + "type": "string" + }, + "rules": { + "type": "string" + }, + "target": { + "type": "string" + } + }, + "required": [ + "rules" + ], + "type": "object" + }, + "type": "array" + } + ] + }, + "emulators": { + "additionalProperties": false, + "properties": { + "auth": { + "additionalProperties": false, + "properties": { + "host": { + "type": "string" + }, + "port": { + "type": "number" + } + }, + "type": "object" + }, + "database": { + "additionalProperties": false, + "properties": { + "host": { + "type": "string" + }, + "port": { + "type": "number" + } + }, + "type": "object" + }, + "firestore": { + "additionalProperties": false, + "properties": { + "host": { + "type": "string" + }, + "port": { + "type": "number" + } + }, + "type": "object" + }, + "functions": { + "additionalProperties": false, + "properties": { + "host": { + "type": "string" + }, + "port": { + "type": "number" + } + }, + "type": "object" + }, + "hosting": { + "additionalProperties": false, + "properties": { + "host": { + "type": "string" + }, + "port": { + "type": "number" + } + }, + "type": "object" + }, + "hub": { + "additionalProperties": false, + "properties": { + "host": { + "type": "string" + }, + "port": { + "type": "number" + } + }, + "type": "object" + }, + "logging": { + "additionalProperties": false, + "properties": { + "host": { + "type": "string" + }, + "port": { + "type": "number" + } + }, + "type": "object" + }, + "pubsub": { + "additionalProperties": false, + "properties": { + "host": { + "type": "string" + }, + "port": { + "type": "number" + } + }, + "type": "object" + }, + "storage": { + "additionalProperties": false, + "properties": { + "host": { + "type": "string" + }, + "port": { + 
"type": "number" + } + }, + "type": "object" + }, + "ui": { + "additionalProperties": false, + "properties": { + "enabled": { + "type": "boolean" + }, + "host": { + "type": "string" + }, + "port": { + "type": [ + "string", + "number" + ] + } + }, + "type": "object" + } + }, + "type": "object" + }, + "firestore": { + "additionalProperties": false, + "properties": { + "indexes": { + "type": "string" + }, + "rules": { + "type": "string" + } + }, + "type": "object" + }, + "functions": { + "additionalProperties": false, + "properties": { + "ignore": { + "items": { + "type": "string" + }, + "type": "array" + }, + "predeploy": { + "items": { + "type": "string" + }, + "type": "array" + }, + "source": { + "type": "string" + } + }, + "type": "object" + }, + "hosting": { + "additionalProperties": false, + "properties": { + "appAssociation": { + "type": "string" + }, + "cleanUrls": { + "type": "boolean" + }, + "headers": { + "items": { + "additionalProperties": false, + "properties": { + "headers": { + "items": { + "additionalProperties": false, + "properties": { + "key": { + "type": "string" + }, + "value": { + "type": "string" + } + }, + "required": [ + "key", + "value" + ], + "type": "object" + }, + "type": "array" + }, + "source": { + "type": "string" + } + }, + "required": [ + "headers", + "source" + ], + "type": "object" + }, + "type": "array" + }, + "i18n": { + "additionalProperties": false, + "properties": { + "root": { + "type": "string" + } + }, + "required": [ + "root" + ], + "type": "object" + }, + "ignore": { + "items": { + "type": "string" + }, + "type": "array" + }, + "postdeploy": { + "type": "string" + }, + "public": { + "type": "string" + }, + "redirects": { + "items": { + "additionalProperties": false, + "properties": { + "destination": { + "type": "string" + }, + "source": { + "type": "string" + }, + "type": { + "type": "number" + } + }, + "required": [ + "destination", + "source", + "type" + ], + "type": "object" + }, + "type": "array" + }, + "rewrites": { + "items": { + "additionalProperties": false, + "properties": { + "destination": { + "type": "string" + }, + "dynamicLinks": { + "type": "boolean" + }, + "function": { + "type": "string" + }, + "run": { + "additionalProperties": false, + "properties": { + "region": { + "type": "string" + }, + "serviceId": { + "type": "string" + } + }, + "required": [ + "serviceId" + ], + "type": "object" + }, + "source": { + "type": "string" + } + }, + "required": [ + "source" + ], + "type": "object" + }, + "type": "array" + }, + "trailingSlash": { + "type": "boolean" + } + }, + "required": [ + "public" + ], + "type": "object" + }, + "remoteconfig": { + "additionalProperties": false, + "properties": { + "template": { + "type": "string" + } + }, + "required": [ + "template" + ], + "type": "object" + }, + "storage": { + "anyOf": [ + { + "additionalProperties": false, + "properties": { + "rules": { + "type": "string" + } + }, + "required": [ + "rules" + ], + "type": "object" + }, + { + "items": { + "additionalProperties": false, + "properties": { + "bucket": { + "type": "string" + }, + "rules": { + "type": "string" + } + }, + "required": [ + "bucket", + "rules" + ], + "type": "object" + }, + "type": "array" + } + ] + } + }, + "type": "object" +} + diff --git a/src/api.js b/src/api.js index a6bb7c59410..c0799a41294 100644 --- a/src/api.js +++ b/src/api.js @@ -99,6 +99,7 @@ var api = { "FIREBASE_CLOUDLOGGING_URL", "https://logging.googleapis.com" ), + containerRegistryDomain: utils.envOverride("CONTAINER_REGISTRY_DOMAIN", "gcr.io"), 
appDistributionOrigin: utils.envOverride( "FIREBASE_APP_DISTRIBUTION_URL", "https://firebaseappdistribution.googleapis.com" diff --git a/src/commands/ext-install.ts b/src/commands/ext-install.ts index 8627cd388f2..cd6c3a506fa 100644 --- a/src/commands/ext-install.ts +++ b/src/commands/ext-install.ts @@ -14,6 +14,7 @@ import { Command } from "../command"; import { FirebaseError } from "../error"; import * as getProjectId from "../getProjectId"; import * as extensionsApi from "../extensions/extensionsApi"; +import * as provisioningHelper from "../extensions/provisioningHelper"; import { displayWarningPrompts } from "../extensions/warnings"; import * as paramHelper from "../extensions/paramHelper"; import { @@ -57,6 +58,8 @@ async function installExtension(options: InstallExtensionOptions): Promise "Installing your extension instance. This usually takes 3 to 5 minutes..." ); try { + await provisioningHelper.checkProductsProvisioned(projectId, spec); + if (spec.billingRequired) { const enabled = await checkBillingEnabled(projectId); if (!enabled) { diff --git a/src/commands/ext-update.ts b/src/commands/ext-update.ts index a3a5fdac9e1..062eedcb250 100644 --- a/src/commands/ext-update.ts +++ b/src/commands/ext-update.ts @@ -12,6 +12,7 @@ import { displayNode10UpdateBillingNotice } from "../extensions/billingMigration import { enableBilling } from "../extensions/checkProjectBilling"; import { checkBillingEnabled } from "../gcp/cloudbilling"; import * as extensionsApi from "../extensions/extensionsApi"; +import * as provisioningHelper from "../extensions/provisioningHelper"; import { ensureExtensionsApiEnabled, logPrefix, @@ -232,6 +233,9 @@ export default new Command("ext:update [updateSource]") newSourceOrigin === SourceOrigin.OFFICIAL_EXTENSION || newSourceOrigin === SourceOrigin.OFFICIAL_EXTENSION_VERSION; await displayChanges(existingSpec, newSpec, isOfficial); + + await provisioningHelper.checkProductsProvisioned(projectId, newSpec); + if (newSpec.billingRequired) { const enabled = await checkBillingEnabled(projectId); if (!enabled) { diff --git a/src/commands/firestore-delete.ts b/src/commands/firestore-delete.ts index 08023fd28d8..ccda07d7485 100644 --- a/src/commands/firestore-delete.ts +++ b/src/commands/firestore-delete.ts @@ -26,12 +26,21 @@ function getConfirmationMessage(deleteOp: FirestoreDelete, options: any) { return ( "You are about to delete the document at " + clc.cyan(deleteOp.path) + - " and all of its subcollections. Are you sure?" + " and all of its subcollections " + + " for " + + clc.cyan(options.project) + + ". Are you sure?" ); } // Shallow document delete - return "You are about to delete the document at " + clc.cyan(deleteOp.path) + ". Are you sure?"; + return ( + "You are about to delete the document at " + + clc.cyan(deleteOp.path) + + " for " + + clc.cyan(options.project) + + ". Are you sure?" + ); } // Recursive collection delete @@ -39,8 +48,10 @@ function getConfirmationMessage(deleteOp: FirestoreDelete, options: any) { return ( "You are about to delete all documents in the collection at " + clc.cyan(deleteOp.path) + - " and all of their subcollections. " + - "Are you sure?" + " and all of their subcollections " + + " for " + + clc.cyan(options.project) + + ". Are you sure?" ); } @@ -48,6 +59,8 @@ function getConfirmationMessage(deleteOp: FirestoreDelete, options: any) { return ( "You are about to delete all documents in the collection at " + clc.cyan(deleteOp.path) + + " for " + + clc.cyan(options.project) + ". Are you sure?" 
); } diff --git a/src/commands/init.js b/src/commands/init.js index 1796d664afb..872612e95dc 100644 --- a/src/commands/init.js +++ b/src/commands/init.js @@ -23,10 +23,66 @@ var _isOutside = function (from, to) { return path.relative(from, to).match(/^\.\./); }; +const choices = [ + { + value: "database", + name: + "Realtime Database: Configure a security rules file for Realtime Database and (optionally) provision default instance", + checked: false, + }, + { + value: "firestore", + name: "Firestore: Configure security rules and indexes files for Firestore", + checked: false, + }, + { + value: "functions", + name: "Functions: Configure a Cloud Functions directory and its files", + checked: false, + }, + { + value: "hosting", + name: + "Hosting: Configure files for Firebase Hosting and (optionally) set up GitHub Action deploys", + checked: false, + }, + { + value: "hosting:github", + name: "Hosting: Set up GitHub Action deploys", + checked: false, + }, + { + value: "storage", + name: "Storage: Configure a security rules file for Cloud Storage", + checked: false, + }, + { + value: "emulators", + name: "Emulators: Set up local emulators for Firebase products", + checked: false, + }, + { + value: "remoteconfig", + name: "Remote Config: Configure a template file for Remote Config", + checked: false, + }, +]; +const featureNames = choices.map((choice) => choice.value); + module.exports = new Command("init [feature]") .description("set up a Firebase project in the current directory") .before(requireAuth) .action(function (feature, options) { + if (feature && !featureNames.includes(feature)) { + return utils.reject( + clc.bold(feature) + + " is not a supported feature; must be one of " + + featureNames.join(", ") + + ".", + { exit: 1 } + ); + } + var cwd = options.cwd || process.cwd(); var warnings = []; @@ -70,46 +126,6 @@ module.exports = new Command("init [feature]") }), }; - var choices = [ - { - value: "database", - name: - "Realtime Database: Configure a security rules file for Realtime Database and (optionally) provision default instance", - checked: false, - }, - { - value: "firestore", - name: "Firestore: Configure security rules and indexes files for Firestore", - checked: false, - }, - { - value: "functions", - name: "Functions: Configure a Cloud Functions directory and its files", - checked: false, - }, - { - value: "hosting", - name: - "Hosting: Configure files for Firebase Hosting and (optionally) set up GitHub Action deploys", - checked: false, - }, - { - value: "storage", - name: "Storage: Configure a security rules file for Cloud Storage", - checked: false, - }, - { - value: "emulators", - name: "Emulators: Set up local emulators for Firebase products", - checked: false, - }, - { - value: "remoteconfig", - name: "Remote Config: Configure a template file for Remote Config", - checked: false, - }, - ]; - var next; // HACK: Windows Node has issues with selectables as the first prompt, so we // add an extra confirmation prompt that fixes the problem diff --git a/src/commands/logout.ts b/src/commands/logout.ts index 387b98e19c4..cfc126d3399 100644 --- a/src/commands/logout.ts +++ b/src/commands/logout.ts @@ -9,7 +9,8 @@ import { promptOnce } from "../prompt"; module.exports = new Command("logout [email]") .description("log the CLI out of Firebase") .action(async (email: string | undefined, options: any) => { - const globalToken = utils.getInheritedOption(options, "token") as string | undefined; + const globalToken = utils.getInheritedOption(options, "token"); + 
utils.assertIsStringOrUndefined(globalToken); const allAccounts = auth.getAllAccounts(); if (allAccounts.length === 0 && !globalToken) { diff --git a/src/commands/remoteconfig-get.ts b/src/commands/remoteconfig-get.ts index baff2614054..12f18f50006 100644 --- a/src/commands/remoteconfig-get.ts +++ b/src/commands/remoteconfig-get.ts @@ -6,6 +6,8 @@ import { RemoteConfigTemplate } from "../remoteconfig/interfaces"; import getProjectId = require("../getProjectId"); import { requirePermissions } from "../requirePermissions"; import { parseTemplateForTable } from "../remoteconfig/get"; +import { Options } from "../options"; +import * as utils from "../utils"; import Table = require("cli-table"); import * as fs from "fs"; @@ -32,7 +34,8 @@ module.exports = new Command("remoteconfig:get") ) .before(requireAuth) .before(requirePermissions, ["cloudconfig.configs.get"]) - .action(async (options) => { + .action(async (options: Options) => { + utils.assertIsString(options.versionNumber); const template: RemoteConfigTemplate = await rcGet.getTemplate( getProjectId(options), checkValidNumber(options.versionNumber) @@ -59,9 +62,15 @@ module.exports = new Command("remoteconfig:get") const fileOut = !!options.output; if (fileOut) { const shouldUseDefaultFilename = options.output === true || options.output === ""; - const filename = shouldUseDefaultFilename - ? options.config.get("remoteconfig.template") - : options.output; + + let filename = undefined; + if (shouldUseDefaultFilename) { + filename = options.config.src.remoteconfig!.template; + } else { + utils.assertIsString(options.output); + filename = options.output; + } + const outTemplate = { ...template }; delete outTemplate.version; fs.writeFileSync(filename, JSON.stringify(outTemplate, null, 2)); diff --git a/src/config.ts b/src/config.ts index f9d873d0cf2..afef38ecd3c 100644 --- a/src/config.ts +++ b/src/config.ts @@ -2,26 +2,28 @@ import { FirebaseConfig } from "./firebaseConfig"; -const _ = require("lodash"); -const clc = require("cli-color"); +import * as _ from "lodash"; +import * as clc from "cli-color"; +import * as fs from "fs-extra"; +import * as path from "path"; const cjson = require("cjson"); -const fs = require("fs-extra"); -const path = require("path"); -const detectProjectRoot = require("./detectProjectRoot").detectProjectRoot; -const { FirebaseError } = require("./error"); -const fsutils = require("./fsutils"); +import { detectProjectRoot } from "./detectProjectRoot"; +import { FirebaseError } from "./error"; +import * as fsutils from "./fsutils"; +import { promptOnce } from "./prompt"; +import { resolveProjectPath } from "./projectPath"; +import * as utils from "./utils"; +import { getValidator, getErrorMessage } from "./firebaseConfigValidate"; +import { logger } from "./logger"; const loadCJSON = require("./loadCJSON"); const parseBoltRules = require("./parseBoltRules"); -const { promptOnce } = require("./prompt"); -const { resolveProjectPath } = require("./projectPath"); -const utils = require("./utils"); - -type PlainObject = Record; export class Config { + static DEFAULT_FUNCTIONS_SOURCE = "functions"; + static FILENAME = "firebase.json"; - static MATERIALIZE_TARGETS = [ + static MATERIALIZE_TARGETS: Array<keyof FirebaseConfig> = [ "database", "emulators", "firestore", @@ -39,6 +41,10 @@ export class Config { private _src: any; + /** + * @param src incoming firebase.json source, parsed but not validated. + * @param options command-line options.
+ */ constructor(src: any, options: any) { this.options = options || {}; this.projectDir = options.projectDir || detectProjectRoot(options); @@ -54,52 +60,43 @@ ); } + // Move the deprecated top-level "rules" key into the "database" object if (_.has(this._src, "rules")) { _.set(this._src, "database.rules", this._src.rules); } + // If a top-level key contains a string path pointing to a supported file + // type (JSON or Bolt), we read the file. + // + // TODO: This is janky and confusing behavior, we should remove it ASAP. Config.MATERIALIZE_TARGETS.forEach((target) => { if (_.get(this._src, target)) { - _.set(this.data, target, this._materialize(target)); + _.set(this.data, target, this.materialize(target)); } }); - // auto-detect functions from package.json in directory + // Auto-detect functions from package.json in directory if ( this.projectDir && !this.get("functions.source") && - fsutils.fileExistsSync(this.path("functions/package.json")) + fsutils.dirExistsSync(this.path("functions")) ) { - this.set("functions.source", "functions"); - } - } - - _hasDeepKey(obj: PlainObject, key: string) { - if (_.has(obj, key)) { - return true; - } - - for (const k in obj) { - if (obj.hasOwnProperty(k)) { - if (_.isPlainObject(obj[k]) && this._hasDeepKey(obj[k] as PlainObject, key)) { - return true; - } - } + this.set("functions.source", Config.DEFAULT_FUNCTIONS_SOURCE); } - return false; - } - - _materialize(target: string) { + materialize(target: string) { const val = _.get(this._src, target); - if (_.isString(val)) { - let out = this._parseFile(target, val); + if (typeof val === "string") { + let out = this.parseFile(target, val); // if e.g. rules.json has {"rules": {}} use that - const lastSegment = _.last(target.split(".")); - if (_.size(out) === 1 && _.has(out, lastSegment)) { + const segments = target.split("."); + const lastSegment = segments[segments.length - 1]; + if (Object.keys(out).length === 1 && _.has(out, lastSegment)) { out = out[lastSegment]; } return out; - } else if (_.isPlainObject(val) || _.isArray(val)) { + } else if (val !== null && typeof val === "object") { return val; } @@ -108,7 +105,7 @@ }); } - _parseFile(target: string, filePath: string) { + parseFile(target: string, filePath: string) { const fullPath = resolveProjectPath(this.options, filePath); const ext = path.extname(filePath); if (!fsutils.fileExistsSync(fullPath)) { @@ -158,6 +155,10 @@ } set(key: string, value: any) { + // TODO: We should really remove all instances of config.set() around the + // codebase but until we do we need this to prevent src from going stale. + _.set(this._src, key, value); + return _.set(this.data, key, value); } @@ -167,7 +168,7 @@ path(pathName: string) { const outPath = path.normalize(path.join(this.projectDir, pathName)); - if (_.includes(path.relative(this.projectDir, outPath), "..")) { + if (path.relative(this.projectDir, outPath).includes("..")) { throw new FirebaseError(clc.bold(pathName) + " is outside of project directory", { exit: 1 }); } return outPath; @@ -231,6 +232,20 @@ try { const filePath = path.resolve(pd, path.basename(filename)); const data = cjson.load(filePath); + + // Validate config against JSON Schema. For now we just print these to debug + // logs but in a future CLI version they could be warnings and/or errors.
+ const validator = getValidator(); + const valid = validator(data); + if (!valid && validator.errors) { + for (const e of validator.errors) { + // TODO: We should probably collapse these errors on the 'dataPath' property + // and then pick out the most important error on each field. Otherwise + // some simple mistakes can cause 2-3 errors. + logger.debug(getErrorMessage(e)); + } + } + return new Config(data, options); } catch (e) { throw new FirebaseError(`There was an error loading ${filename}:\n\n` + e.message, { diff --git a/src/database/rulesConfig.ts b/src/database/rulesConfig.ts index 17422452378..6299d225895 100644 --- a/src/database/rulesConfig.ts +++ b/src/database/rulesConfig.ts @@ -1,6 +1,8 @@ import { FirebaseError } from "../error"; import { Config } from "../config"; import { logger } from "../logger"; +import { Options } from "../options"; +import * as utils from "../utils"; export interface RulesInstanceConfig { instance: string; @@ -18,9 +20,9 @@ interface DatabaseConfig { */ export function normalizeRulesConfig( rulesConfig: RulesInstanceConfig[], - options: any + options: Options ): RulesInstanceConfig[] { - const config = options.config as Config; + const config = options.config; return rulesConfig.map((rc) => { return { instance: rc.instance, @@ -29,18 +31,15 @@ export function normalizeRulesConfig( }); } -export function getRulesConfig(projectId: string, options: any): RulesInstanceConfig[] { - // TODO(samstern): Config should be typed - const config = options.config as any; - - const dbConfig: { rules?: string } | DatabaseConfig[] | undefined = config.get("database"); - +export function getRulesConfig(projectId: string, options: Options): RulesInstanceConfig[] { + const dbConfig = options.config.src.database; if (dbConfig === undefined) { return []; } if (!Array.isArray(dbConfig)) { if (dbConfig && dbConfig.rules) { + utils.assertIsStringOrUndefined(options.instance); const instance = options.instance || `${options.project}-default-rtdb`; return [{ rules: dbConfig.rules, instance }]; } else { @@ -50,13 +49,14 @@ export function getRulesConfig(projectId: string, options: any): RulesInstanceCo } const results: RulesInstanceConfig[] = []; + const rc = options.rc as any; for (const c of dbConfig) { if (c.target) { // Make sure the target exists (this will throw otherwise) - options.rc.requireTarget(projectId, "database", c.target); + rc.requireTarget(projectId, "database", c.target); // Get a list of db instances the target maps to - const instances: string[] = options.rc.target(projectId, "database", c.target); + const instances: string[] = rc.target(projectId, "database", c.target); for (const i of instances) { results.push({ instance: i, rules: c.rules }); } diff --git a/src/deploy/firestore/prepare.ts b/src/deploy/firestore/prepare.ts index be2bd7b863b..f6e4807b8d9 100644 --- a/src/deploy/firestore/prepare.ts +++ b/src/deploy/firestore/prepare.ts @@ -4,14 +4,15 @@ import * as clc from "cli-color"; import loadCJSON = require("../../loadCJSON"); import { RulesDeploy, RulesetServiceType } from "../../rulesDeploy"; import utils = require("../../utils"); +import { Options } from "../../options"; /** * Prepares Firestore Rules deploys. * @param context The deploy context. * @param options The CLI options object. 
*/ -async function prepareRules(context: any, options: any): Promise { - const rulesFile = options.config.get("firestore.rules"); +async function prepareRules(context: any, options: Options): Promise { + const rulesFile = options.config.src.firestore?.rules; if (context.firestoreRules && rulesFile) { const rulesDeploy = new RulesDeploy(options, RulesetServiceType.CLOUD_FIRESTORE); @@ -25,12 +26,12 @@ async function prepareRules(context: any, options: any): Promise { * @param context The deploy context. * @param options The CLI options object. */ -function prepareIndexes(context: any, options: any): void { - if (!context.firestoreIndexes || !options.config.get("firestore.indexes")) { +function prepareIndexes(context: any, options: Options): void { + if (!context.firestoreIndexes || !options.config.src.firestore?.indexes) { return; } - const indexesFileName = options.config.get("firestore.indexes"); + const indexesFileName = options.config.src.firestore.indexes; const indexesPath = options.config.path(indexesFileName); const parsedSrc = loadCJSON(indexesPath); diff --git a/src/deploy/functions/containerCleaner.ts b/src/deploy/functions/containerCleaner.ts new file mode 100644 index 00000000000..a5d4584c5d8 --- /dev/null +++ b/src/deploy/functions/containerCleaner.ts @@ -0,0 +1,209 @@ +// This code is very aggressive about running requests in parallel and does not use +// a task queue, because the quota limits for GCR.io are absurdly high. At the time +// of writing, we can make 50K requests per 10m. +// https://cloud.google.com/container-registry/quotas + +import * as clc from "cli-color"; + +import { containerRegistryDomain } from "../../api"; +import { logger } from "../../logger"; +import * as docker from "../../gcp/docker"; +import * as backend from "./backend"; +import * as utils from "../../utils"; + +// A flattening of container_registry_hosts and +// region_multiregion_map from regionconfig.borg +const SUBDOMAIN_MAPPING: Record = { + "us-west2": "us", + "us-west3": "us", + "us-west4": "us", + "us-central1": "us", + "us-central2": "us", + "us-east1": "us", + "us-east4": "us", + "northamerica-northeast1": "us", + "southamerica-east1": "us", + "europe-west1": "eu", + "europe-west2": "eu", + "europe-west3": "eu", + "europe-west5": "eu", + "europe-west6": "eu", + "europe-central2": "eu", + "asia-east1": "asia", + "asia-east2": "asia", + "asia-northeast1": "asia", + "asia-northeast2": "asia", + "asia-northeast3": "asia", + "asia-south1": "asia", + "asia-southeast2": "asia", + "australia-southeast1": "asia", +}; + +export async function cleanupBuildImages(functions: backend.FunctionSpec[]): Promise { + utils.logBullet(clc.bold.cyan("functions: ") + "cleaning up build files..."); + const gcrCleaner = new ContainerRegistryCleaner(); + const failedDomains: Set = new Set(); + await Promise.all( + functions.map((func) => + (async () => { + try { + await gcrCleaner.cleanupFunction(func); + } catch (err) { + const path = `${func.project}/${SUBDOMAIN_MAPPING[func.region]}/gcf`; + failedDomains.add(`https://console.cloud.google.com/gcr/images/${path}`); + } + })() + ) + ); + if (failedDomains.size) { + let message = + "Unhandled error cleaning up build images. This could result in a small monthly bill if not corrected. 
"; + message += + "You can attempt to delete these images by redeploying or you can delete them manually at"; + if (failedDomains.size == 1) { + message += " " + failedDomains.values().next().value; + } else { + message += [...failedDomains].map((domain) => "\n\t" + domain).join(""); + } + utils.logLabeledWarning("functions", message); + } + + // TODO: clean up Artifact Registry images as well. +} + +export class ContainerRegistryCleaner { + readonly helpers: Record = {}; + + private helper(location: string): DockerHelper { + const subdomain = SUBDOMAIN_MAPPING[location] || "us"; + if (!this.helpers[subdomain]) { + const origin = `https://${subdomain}.${containerRegistryDomain}`; + this.helpers[subdomain] = new DockerHelper(origin); + } + return this.helpers[subdomain]; + } + + // GCFv1 has the directory structure: + // gcf/ + // +- / + // +- + // +- (tags: _version-<#>) + // +- cache/ (Only present in first deploy of region) + // | +- (tags: latest) + // +- worker/ (Only present in first deploy of region) + // +- (tags: latest) + // + // We'll parallel search for the valid and their children + // until we find one with the right tag for the function name. + // The underlying Helper's caching should make this expensive for + // the first function and free for the next functions in the same + // region. + async cleanupFunction(func: backend.FunctionSpec): Promise { + const helper = this.helper(func.region); + const uuids = (await helper.ls(`${func.project}/gcf/${func.region}`)).children; + + const uuidTags: Record = {}; + const loadUuidTags: Promise[] = []; + for (const uuid of uuids) { + loadUuidTags.push( + (async () => { + const path = `${func.project}/gcf/${func.region}/${uuid}`; + const tags = (await helper.ls(path)).tags; + uuidTags[path] = tags; + })() + ); + } + await Promise.all(loadUuidTags); + + const extractFunction = /^(.*)_version-\d+$/; + const entry = Object.entries(uuidTags).find(([, tags]) => { + return tags.find((tag) => extractFunction.exec(tag)?.[1] === func.id); + }); + + if (!entry) { + logger.debug("Could not find image for function", backend.functionName(func)); + return; + } + await helper.rm(entry[0]); + } +} + +export interface Stat { + children: string[]; + digests: docker.Digest[]; + tags: docker.Tag[]; +} + +export class DockerHelper { + readonly client: docker.Client; + readonly cache: Record = {}; + + constructor(origin: string) { + this.client = new docker.Client(origin); + } + + async ls(path: string): Promise { + if (!this.cache[path]) { + const raw = await this.client.listTags(path); + this.cache[path] = { + tags: raw.tags, + digests: Object.keys(raw.manifest), + children: raw.child, + }; + } + return this.cache[path]; + } + + // While we can't guarantee all promises will succeed, we can do our darndest + // to expunge as much as possible before throwing. + async rm(path: string): Promise { + let toThrowLater: any = undefined; + const stat = await this.ls(path); + const recursive = stat.children.map((child) => + (async () => { + try { + await this.rm(`${path}/${child}`); + stat.children.splice(stat.children.indexOf(child), 1); + } catch (err) { + toThrowLater = err; + } + })() + ); + // Unlike a filesystem, we can delete a "directory" while its children are still being + // deleted. Run these in parallel to improve performance and just wait for the result + // before the function's end. + + // An image cannot be deleted until its tags have been removed. Do this in two phases. 
+ const deleteTags = stat.tags.map((tag) => + (async () => { + try { + await this.client.deleteTag(path, tag); + stat.tags.splice(stat.tags.indexOf(tag), 1); + } catch (err) { + logger.debug("Got error trying to remove docker tag:", err); + toThrowLater = err; + } + })() + ); + await Promise.all(deleteTags); + + const deleteImages = stat.digests.map((digest) => + (async () => { + try { + await this.client.deleteImage(path, digest); + stat.digests.splice(stat.digests.indexOf(digest), 1); + } catch (err) { + logger.debug("Got error trying to remove docker image:", err); + toThrowLater = err; + } + })() + ); + await Promise.all(deleteImages); + + await Promise.all(recursive); + + if (toThrowLater) { + throw toThrowLater; + } + } +} diff --git a/src/deploy/functions/deploy.ts b/src/deploy/functions/deploy.ts index 0375d788601..d6219c4e004 100644 --- a/src/deploy/functions/deploy.ts +++ b/src/deploy/functions/deploy.ts @@ -10,6 +10,8 @@ import * as fs from "fs"; import * as gcs from "../../gcp/storage"; import * as gcf from "../../gcp/cloudfunctions"; import { Options } from "../../options"; +import { Config } from "../../config"; +import * as utils from "../../utils"; const GCP_REGION = functionsUploadRegion; @@ -45,7 +47,7 @@ export async function deploy( options: Options, payload: args.Payload ): Promise { - if (!options.config.get("functions")) { + if (!options.config.src.functions) { return; } @@ -56,7 +58,7 @@ export async function deploy( } try { - const want = options.config.get("functions.backend") as backend.Backend; + const want = payload.functions!.backend; const uploads: Promise[] = []; if (want.cloudFunctions.some((fn) => fn.apiVersion === 1)) { uploads.push(uploadSourceV1(context)); @@ -66,10 +68,14 @@ export async function deploy( } await Promise.all(uploads); + utils.assertDefined( + options.config.src.functions.source, + "Error: 'functions.source' is not defined" + ); logSuccess( clc.green.bold("functions:") + " " + - clc.bold(options.config.get("functions.source")) + + clc.bold(options.config.src.functions.source) + " folder uploaded successfully" ); } catch (err) { diff --git a/src/deploy/functions/prepare.ts b/src/deploy/functions/prepare.ts index b9d19e863c0..d845e6f1da4 100644 --- a/src/deploy/functions/prepare.ts +++ b/src/deploy/functions/prepare.ts @@ -1,6 +1,5 @@ import * as clc from "cli-color"; -import { FirebaseError } from "../../error"; import { Options } from "../../options"; import { ensureCloudBuildEnabled } from "./ensureCloudBuildEnabled"; import { functionMatchesAnyGroup, getFilterGroups } from "./functionsDeployHelper"; @@ -14,6 +13,7 @@ import * as functionsConfig from "../../functionsConfig"; import * as getProjectId from "../../getProjectId"; import * as runtimes from "./runtimes"; import * as validate from "./validate"; +import * as utils from "../../utils"; import { logger } from "../../logger"; export async function prepare( @@ -21,7 +21,7 @@ export async function prepare( options: Options, payload: args.Payload ): Promise { - if (!options.config.has("functions")) { + if (!options.config.src.functions) { return; } @@ -54,16 +54,20 @@ export async function prepare( logger.debug(`Analyzing ${runtimeDelegate.name} backend spec`); const wantBackend = await runtimeDelegate.discoverSpec(runtimeConfig, env); - options.config.set("functions.backend", wantBackend); + payload.functions = { backend: wantBackend }; if (backend.isEmptyBackend(wantBackend)) { return; } // Prepare the functions directory for upload, and set context.triggers. 
+ utils.assertDefined( + options.config.src.functions.source, + "Error: 'functions.source' is not defined" + ); logBullet( clc.cyan.bold("functions:") + " preparing " + - clc.bold(options.config.get("functions.source")) + + clc.bold(options.config.src.functions.source) + " directory for uploading..." ); context.functionsSource = await prepareFunctionsUpload(runtimeConfig, options); @@ -87,11 +91,6 @@ export async function prepare( }) ); - // Build a regionMap, and duplicate functions for each region they are being deployed to. - payload.functions = { - backend: wantBackend, - }; - // Validate the function code that is being deployed. validate.functionIdsAreValid(wantBackend.cloudFunctions); diff --git a/src/deploy/functions/prepareFunctionsUpload.ts b/src/deploy/functions/prepareFunctionsUpload.ts index f200ec9a302..02d80e09df2 100644 --- a/src/deploy/functions/prepareFunctionsUpload.ts +++ b/src/deploy/functions/prepareFunctionsUpload.ts @@ -14,6 +14,7 @@ import * as utils from "../../utils"; import * as fsAsync from "../../fsAsync"; import * as args from "./args"; import { Options } from "../../options"; +import { Config } from "../../config"; const CONFIG_DEST_FILE = ".runtimeconfig.json"; @@ -74,7 +75,7 @@ async function packageSource(options: Options, sourceDir: string, configValues: // you're in the public dir when you deploy. // We ignore any CONFIG_DEST_FILE that already exists, and write another one // with current config values into the archive in the "end" handler for reader - const ignore = options.config.get("functions.ignore", ["node_modules", ".git"]) as string[]; + const ignore = options.config.src.functions?.ignore || ["node_modules", ".git"]; ignore.push( "firebase-debug.log", "firebase-debug.*.log", @@ -103,10 +104,16 @@ async function packageSource(options: Options, sourceDir: string, configValues: } ); } + + utils.assertDefined(options.config.src.functions); + utils.assertDefined( + options.config.src.functions.source, + "Error: 'functions.source' is not defined" + ); utils.logBullet( clc.cyan.bold("functions:") + " packaged " + - clc.bold(options.config.get("functions.source")) + + clc.bold(options.config.src.functions.source) + " (" + filesize(archive.pointer()) + ") for uploading" @@ -118,6 +125,12 @@ export async function prepareFunctionsUpload( runtimeConfig: backend.RuntimeConfigValues, options: Options ): Promise { - const sourceDir = options.config.path(options.config.get("functions.source") as string); + utils.assertDefined(options.config.src.functions); + utils.assertDefined( + options.config.src.functions.source, + "Error: 'functions.source' is not defined" + ); + + const sourceDir = options.config.path(options.config.src.functions.source); return packageSource(options, sourceDir, runtimeConfig); } diff --git a/src/deploy/functions/release.ts b/src/deploy/functions/release.ts index e53745d4e65..da1bf8177cf 100644 --- a/src/deploy/functions/release.ts +++ b/src/deploy/functions/release.ts @@ -7,12 +7,13 @@ import { getAppEngineLocation } from "../../functionsConfig"; import { promptForFunctionDeletion } from "./prompts"; import { DeploymentTimer } from "./deploymentTimer"; import { ErrorHandler } from "./errorHandler"; -import * as utils from "../../utils"; +import { Options } from "../../options"; +import * as args from "./args"; +import * as backend from "./backend"; +import * as containerCleaner from "./containerCleaner"; import * as helper from "./functionsDeployHelper"; import * as tasks from "./tasks"; -import * as backend from "./backend"; -import * as 
args from "./args"; -import { Options } from "../../options"; +import * as utils from "../../utils"; export async function release(context: args.Context, options: Options, payload: args.Payload) { if (!options.config.has("functions")) { @@ -133,6 +134,7 @@ export async function release(context: args.Context, options: Options, payload: ); } helper.logAndTrackDeployStats(cloudFunctionsQueue, errorHandler); + await containerCleaner.cleanupBuildImages(payload.functions!.backend.cloudFunctions); await helper.printTriggerUrls(context); errorHandler.printWarnings(); errorHandler.printErrors(); diff --git a/src/deploy/functions/runtimes/golang/index.ts b/src/deploy/functions/runtimes/golang/index.ts new file mode 100644 index 00000000000..ac310f1b349 --- /dev/null +++ b/src/deploy/functions/runtimes/golang/index.ts @@ -0,0 +1,168 @@ +import { promisify } from "util"; +import * as path from "path"; +import * as fs from "fs"; +import * as spawn from "cross-spawn"; + +import { FirebaseError } from "../../../../error"; +import { Options } from "../../../../options"; +import { logger } from "../../../../logger"; +import * as args from "../../args"; +import * as backend from "../../backend"; +import * as getProjectId from "../../../../getProjectId"; +import * as runtimes from ".."; + +export const ADMIN_SDK = "firebase.google.com/go/v4"; +export const FUNCTIONS_SDK = "github.com/FirebaseExtended/firebase-functions-go"; + +const VERSION_TO_RUNTIME: Record = { + "1.13": "go113", +}; + +export async function tryCreateDelegate( + context: args.Context, + options: Options +): Promise { + const relativeSourceDir = options.config.get("functions.source") as string; + const sourceDir = options.config.path(relativeSourceDir); + const goModPath = path.join(sourceDir, "go.mod"); + const projectId = getProjectId(options); + + let module: Module; + try { + const modBuffer = await promisify(fs.readFile)(goModPath); + module = parseModule(modBuffer.toString("utf8")); + } catch (err) { + logger.debug("Customer code is not Golang code (or they aren't using modules)"); + return; + } + + let runtime = options.config.get("functions.runtime"); + if (!runtime) { + if (!module.version) { + throw new FirebaseError("Could not detect Golang version from go.mod"); + } + if (!VERSION_TO_RUNTIME[module.version]) { + throw new FirebaseError( + `go.mod specifies Golang version ${ + module.version + } which is unsupported by Google Cloud Functions. Valid values are ${Object.keys( + VERSION_TO_RUNTIME + ).join(", ")}` + ); + } + runtime = VERSION_TO_RUNTIME[module.version]; + } + + return new Delegate(projectId, sourceDir, runtime, module); +} + +// A module can be much more complicated than this, but this is all we need so far. 
+// For a full reference, see https://golang.org/doc/modules/gomod-ref +interface Module { + module: string; + version: string; + dependencies: Record; +} + +export function parseModule(mod: string): Module { + const module: Module = { + module: "", + version: "", + dependencies: {}, + }; + const lines = mod.split("\n"); + let inRequire = false; + for (const line of lines) { + if (inRequire) { + const endRequireMatch = /\)/.exec(line); + if (endRequireMatch) { + inRequire = false; + continue; + } + + const requireMatch = /([^ ]+) (.*)/.exec(line); + if (requireMatch) { + module.dependencies[requireMatch[1]] = requireMatch[2]; + continue; + } + + if (line.trim()) { + logger.debug("Don't know how to handle line", line, "inside a mod.go require block"); + } + continue; + } + const modMatch = /^module (.*)$/.exec(line); + if (modMatch) { + module.module = modMatch[1]; + continue; + } + const versionMatch = /^go (\d+\.\d+)$/.exec(line); + if (versionMatch) { + module.version = versionMatch[1]; + continue; + } + + const requireMatch = /^require ([^ ]+) (.*)$/.exec(line); + if (requireMatch) { + module.dependencies[requireMatch[1]] = requireMatch[2]; + continue; + } + + const requireBlockMatch = /^require +\(/.exec(line); + if (requireBlockMatch) { + inRequire = true; + continue; + } + + if (line.trim()) { + logger.debug("Don't know how to handle line", line, "in mod.go"); + } + } + + if (!module.module) { + throw new FirebaseError("Module has no name"); + } + if (!module.version) { + throw new FirebaseError(`Module ${module.module} has no go version`); + } + + return module; +} + +export class Delegate { + public readonly name = "golang"; + + constructor( + private readonly projectId: string, + private readonly sourceDir: string, + public readonly runtime: runtimes.Runtime, + private readonly module: Module + ) {} + validate(): Promise { + throw new FirebaseError("Cannot yet analyze Go source code"); + } + + build(): Promise { + const res = spawn.sync("go", ["build"], { + cwd: this.sourceDir, + stdio: "inherit", + }); + if (res.error) { + logger.debug("Got error running go build", res); + throw new FirebaseError("Failed to build functions source", { children: [res.error] }); + } + + return Promise.resolve(); + } + + watch(): Promise<() => Promise> { + return Promise.resolve(() => Promise.resolve()); + } + + discoverSpec( + configValues: backend.RuntimeConfigValues, + envs: backend.EnvironmentVariables + ): Promise { + throw new FirebaseError("Cannot yet discover function specs"); + } +} diff --git a/src/deploy/functions/runtimes/index.ts b/src/deploy/functions/runtimes/index.ts index 1c8523ab5d3..28b62647be0 100644 --- a/src/deploy/functions/runtimes/index.ts +++ b/src/deploy/functions/runtimes/index.ts @@ -1,13 +1,18 @@ import { Options } from "../../../options"; import * as backend from "../backend"; import * as args from "../args"; +import * as golang from "./golang"; import * as node from "./node"; import * as validate from "../validate"; import { FirebaseError } from "../../../error"; /** Supported runtimes for new Cloud Functions. */ const RUNTIMES: string[] = ["nodejs10", "nodejs12", "nodejs14"]; -export type Runtime = typeof RUNTIMES[number]; +// Experimental runtimes are part of the Runtime type, but are in a +// different list to help guard against some day accidentally iterating over +// and printing a hidden runtime to the user. 
+const EXPERIMENTAL_RUNTIMES = ["go113"]; +export type Runtime = typeof RUNTIMES[number] | typeof EXPERIMENTAL_RUNTIMES[number]; /** Runtimes that can be found in existing backends but not used for new functions. */ const DEPRECATED_RUNTIMES = ["nodejs6", "nodejs8"]; @@ -20,7 +25,7 @@ export function isDeprecatedRuntime(runtime: string): runtime is DeprecatedRunti /** Type deduction helper for a runtime string. */ export function isValidRuntime(runtime: string): runtime is Runtime { - return RUNTIMES.includes(runtime); + return RUNTIMES.includes(runtime) || EXPERIMENTAL_RUNTIMES.includes(runtime); } const MESSAGE_FRIENDLY_RUNTIMES: Record = { @@ -29,6 +34,7 @@ const MESSAGE_FRIENDLY_RUNTIMES: Record = { nodejs10: "Node.js 10", nodejs12: "Node.js 12", nodejs14: "Node.js 14", + go113: "Go 1.13", }; /** @@ -94,7 +100,7 @@ export interface RuntimeDelegate { } type Factory = (context: args.Context, options: Options) => Promise; -const factories: Factory[] = [node.tryCreateDelegate]; +const factories: Factory[] = [node.tryCreateDelegate, golang.tryCreateDelegate]; export async function getRuntimeDelegate( context: args.Context, @@ -108,6 +114,13 @@ export async function getRuntimeDelegate( } validate.functionsDirectoryExists(options, sourceDirName); + // There isn't currently an easy way to map from runtime name to a delegate, but we can at least guarantee + // that any explicit runtime from firebase.json is valid + const runtime = options.config.get("functions.runtime"); + if (runtime && !isValidRuntime(runtime)) { + throw new FirebaseError("Cannot deploy function with runtime " + runtime); + } + for (const factory of factories) { const delegate = await factory(context, options); if (delegate) { diff --git a/src/deploy/functions/runtimes/node/index.ts b/src/deploy/functions/runtimes/node/index.ts index f40d5d7da65..941c450b2db 100644 --- a/src/deploy/functions/runtimes/node/index.ts +++ b/src/deploy/functions/runtimes/node/index.ts @@ -18,8 +18,8 @@ export async function tryCreateDelegate( context: args.Context, options: Options ): Promise { - const sourceDirName = options.config.get("functions.source") as string; - const sourceDir = options.config.path(sourceDirName); + const projectRelativeSourceDir = options.config.get("functions.source") as string; + const sourceDir = options.config.path(projectRelativeSourceDir); const packageJsonPath = path.join(sourceDir, "package.json"); if (!(await promisify(fs.exists)(packageJsonPath))) { @@ -40,13 +40,7 @@ export async function tryCreateDelegate( throw new FirebaseError(`Unexpected runtime ${runtime}`); } - return new Delegate( - getProjectId(options), - options.config.projectDir, - sourceDirName, - sourceDir, - runtime - ); + return new Delegate(getProjectId(options), options.config.projectDir, sourceDir, runtime); } // TODO(inlined): Consider moving contents in parseRuntimeAndValidateSDK and validate around. 
@@ -59,7 +53,6 @@ export class Delegate { constructor( private readonly projectId: string, private readonly projectDir: string, - private readonly sourceDirName: string, private readonly sourceDir: string, public readonly runtime: runtimes.Runtime ) {} @@ -78,7 +71,8 @@ export class Delegate { validate(): Promise { versioning.checkFunctionsSDKVersion(this.sdkVersion); - validate.packageJsonIsValid(this.sourceDirName, this.sourceDir, this.projectDir); + const relativeDir = path.relative(this.projectDir, this.sourceDir); + validate.packageJsonIsValid(relativeDir, this.sourceDir, this.projectDir); return Promise.resolve(); } diff --git a/src/deploy/functions/validate.ts b/src/deploy/functions/validate.ts index f1c91201b73..4773f23a659 100644 --- a/src/deploy/functions/validate.ts +++ b/src/deploy/functions/validate.ts @@ -31,12 +31,12 @@ export function functionsDirectoryExists( /** * Validate function names only contain letters, numbers, underscores, and hyphens - * and not exceed 62 characters in length. + * and not exceed 63 characters in length. * @param functionNames Object containing function names as keys. * @throws { FirebaseError } Function names must be valid. */ export function functionIdsAreValid(functions: { id: string }[]): void { - const validFunctionNameRegex = /^[a-zA-Z0-9_-]{1,62}$/; + const validFunctionNameRegex = /^[a-zA-Z0-9_-]{1,63}$/; const invalidIds = functions.filter((fn) => !validFunctionNameRegex.test(fn.id)); if (invalidIds.length !== 0) { const msg = diff --git a/src/deploy/remoteconfig/prepare.ts b/src/deploy/remoteconfig/prepare.ts index dc9503c066f..34ee2a674bc 100644 --- a/src/deploy/remoteconfig/prepare.ts +++ b/src/deploy/remoteconfig/prepare.ts @@ -2,12 +2,13 @@ import { getProjectNumber } from "../../getProjectNumber"; import loadCJSON = require("../../loadCJSON"); import { getEtag } from "./functions"; import { validateInputRemoteConfigTemplate } from "./functions"; +import { Options } from "../../options"; -export default async function (context: any, options: any): Promise { +export default async function (context: any, options: Options): Promise { if (!context) { return; } - const filePath = options.config.get("remoteconfig.template"); + const filePath = options.config.src.remoteconfig?.template; if (!filePath) { return; } diff --git a/src/deploy/storage/prepare.ts b/src/deploy/storage/prepare.ts index f849758c36d..a65c8c5538c 100644 --- a/src/deploy/storage/prepare.ts +++ b/src/deploy/storage/prepare.ts @@ -2,13 +2,14 @@ import * as _ from "lodash"; import gcp = require("../../gcp"); import { RulesDeploy, RulesetServiceType } from "../../rulesDeploy"; +import { Options } from "../../options"; /** * Prepares for a Firebase Storage deployment. * @param context The deploy context. * @param options The CLI options object. 
*/ -export default async function (context: any, options: any): Promise { +export default async function (context: any, options: Options): Promise { let rulesConfig = options.config.get("storage"); if (!rulesConfig) { return; @@ -27,7 +28,7 @@ export default async function (context: any, options: any): Promise { rulesConfig.forEach((ruleConfig: any) => { if (ruleConfig.target) { - options.rc.requireTarget(context.projectId, "storage", ruleConfig.target); + (options.rc as any).requireTarget(context.projectId, "storage", ruleConfig.target); } rulesDeploy.addFile(ruleConfig.rules); }); diff --git a/src/emulator/constants.ts b/src/emulator/constants.ts index 4d22831325a..8813aeacd6f 100644 --- a/src/emulator/constants.ts +++ b/src/emulator/constants.ts @@ -115,14 +115,6 @@ export class Constants { return DEFAULT_PORTS[emulator]; } - static getHostKey(emulator: Emulators): string { - return `emulators.${emulator.toString()}.host`; - } - - static getPortKey(emulator: Emulators): string { - return `emulators.${emulator.toString()}.port`; - } - static description(name: Emulators): string { return EMULATOR_DESCRIPTION[name]; } diff --git a/src/emulator/controller.ts b/src/emulator/controller.ts index cd768aa1bb5..ac33660d22a 100644 --- a/src/emulator/controller.ts +++ b/src/emulator/controller.ts @@ -42,12 +42,11 @@ import { fileExistsSync } from "../fsutils"; import { StorageEmulator } from "./storage"; import { getDefaultDatabaseInstance } from "../getDefaultDatabaseInstance"; import { getProjectDefaultAccount } from "../auth"; +import { Options } from "../options"; +import { ParsedTriggerDefinition } from "./functionsEmulatorShared"; -async function getAndCheckAddress(emulator: Emulators, options: any): Promise
<string> { - let host = Constants.normalizeHost( - options.config.get(Constants.getHostKey(emulator), Constants.getDefaultHost(emulator)) - ); - +async function getAndCheckAddress(emulator: Emulators, options: Options): Promise<string>
{ + let host = options.config.src.emulators?.[emulator]?.host || Constants.getDefaultHost(emulator); if (host === "localhost" && utils.isRunningInWSL()) { // HACK(https://github.com/firebase/firebase-tools-ui/issues/332): Use IPv4 // 127.0.0.1 instead of localhost. This, combined with the hack in @@ -58,11 +57,11 @@ async function getAndCheckAddress(emulator: Emulators, options: any): Promise { +export async function startAll(options: Options, showUI: boolean = true): Promise { // Emulators config is specified in firebase.json as: // "emulators": { // "firestore": { @@ -390,6 +385,7 @@ export async function startAll(options: any, showUI: boolean = true): Promise | undefined), }, - predefinedTriggers: options.extensionTriggers, + predefinedTriggers: options.extensionTriggers as ParsedTriggerDefinition[] | undefined, nodeMajorVersion: parseRuntimeVersion( options.extensionNodeVersion || options.config.get("functions.runtime") ), @@ -469,6 +473,7 @@ export async function startAll(options: any, showUI: boolean = true): Promise { +async function _fatal(emulator: Emulators, errorMsg: string): Promise { // if we do not issue a stopAll here and _fatal is called during startup, we could leave emulators running // that did start already // for example: JAVA_HOME=/does/not/exist firebase emulators:start try { - const logger = EmulatorLogger.forEmulator(emulator.name); + const logger = EmulatorLogger.forEmulator(emulator); logger.logLabeled( "WARN", - emulator.name, + emulator, `Fatal error occurred: \n ${errorMsg}, \n stopping all running emulators` ); await EmulatorRegistry.stopAll(); @@ -270,6 +270,18 @@ async function _fatal(emulator: DownloadableEmulatorDetails, errorMsg: string): } } +export async function handleEmulatorProcessError(emulator: Emulators, err: any): Promise { + const description = Constants.description(emulator); + if (err.path === "java" && err.code === "ENOENT") { + await _fatal( + emulator, + `${description} has exited because java is not installed, you can install it from https://openjdk.java.net/install/` + ); + } else { + await _fatal(emulator, `${description} has exited: ${err}`); + } +} + async function _runBinary( emulator: DownloadableEmulatorDetails, command: DownloadableEmulatorCommand, @@ -298,7 +310,7 @@ async function _runBinary( `Could not spawn child process for emulator, check that java is installed and on your $PATH.` ); } - _fatal(emulator, e); + _fatal(emulator.name, e); } const description = Constants.description(emulator.name); @@ -331,21 +343,15 @@ async function _runBinary( } }); - emulator.instance.on("error", async (err: any) => { - if (err.path === "java" && err.code === "ENOENT") { - await _fatal( - emulator, - `${description} has exited because java is not installed, you can install it from https://openjdk.java.net/install/` - ); - } else { - await _fatal(emulator, `${description} has exited: ${err}`); - } + emulator.instance.on("error", (err) => { + handleEmulatorProcessError(emulator.name, err); }); + emulator.instance.once("exit", async (code, signal) => { if (signal) { utils.logWarning(`${description} has exited upon receiving signal: ${signal}`); } else if (code && code !== 0 && code !== /* SIGINT */ 130) { - await _fatal(emulator, `${description} has exited with code: ${code}`); + await _fatal(emulator.name, `${description} has exited with code: ${code}`); } }); resolve(); diff --git a/src/emulator/functionsEmulatorRuntime.ts b/src/emulator/functionsEmulatorRuntime.ts index 7f52c2e12a0..ac4e98842de 100644 --- 
a/src/emulator/functionsEmulatorRuntime.ts +++ b/src/emulator/functionsEmulatorRuntime.ts @@ -1072,10 +1072,7 @@ async function initializeRuntime( if (extensionTriggers) { parsedDefinitions = extensionTriggers; } else { - require("../deploy/functions/discovery/jsexports/extractTriggers")( - triggerModule, - parsedDefinitions - ); + require("../deploy/functions/runtimes/node/extractTriggers")(triggerModule, parsedDefinitions); } const triggerDefinitions: EmulatedTriggerDefinition[] = emulatedFunctionsByRegion( parsedDefinitions diff --git a/src/emulator/functionsEmulatorShell.ts b/src/emulator/functionsEmulatorShell.ts index 9ab4559cdb4..c69009b78df 100644 --- a/src/emulator/functionsEmulatorShell.ts +++ b/src/emulator/functionsEmulatorShell.ts @@ -35,13 +35,13 @@ export class FunctionsEmulatorShell implements FunctionsShellController { } } - call(id: string, data: any, opts: any): void { - const trigger = this.getTrigger(id); - logger.debug(`shell:${id}: trigger=${JSON.stringify(trigger)}`); - logger.debug(`shell:${id}: opts=${JSON.stringify(opts)}, data=${JSON.stringify(data)}`); + call(name: string, data: any, opts: any): void { + const trigger = this.getTrigger(name); + logger.debug(`shell:${name}: trigger=${JSON.stringify(trigger)}`); + logger.debug(`shell:${name}: opts=${JSON.stringify(opts)}, data=${JSON.stringify(data)}`); if (!trigger.eventTrigger) { - throw new FirebaseError(`Function ${id} is not a background function`); + throw new FirebaseError(`Function ${name} is not a background function`); } const eventType = trigger.eventTrigger.eventType; @@ -64,16 +64,16 @@ export class FunctionsEmulatorShell implements FunctionsShellController { data, }; - this.emu.startFunctionRuntime(id, trigger.name, EmulatedTriggerType.BACKGROUND, proto); + this.emu.startFunctionRuntime(trigger.id, trigger.name, EmulatedTriggerType.BACKGROUND, proto); } - private getTrigger(id: string): EmulatedTriggerDefinition { + private getTrigger(name: string): EmulatedTriggerDefinition { const result = this.triggers.find((trigger) => { - return trigger.id === id; + return trigger.name === name; }); if (!result) { - throw new FirebaseError(`Could not find trigger ${id}`); + throw new FirebaseError(`Could not find trigger ${name}`); } return result; diff --git a/src/emulator/hubExport.ts b/src/emulator/hubExport.ts index 4ba51b523ec..e6e117d52f0 100644 --- a/src/emulator/hubExport.ts +++ b/src/emulator/hubExport.ts @@ -12,6 +12,7 @@ import { EmulatorHub } from "./hub"; import { getDownloadDetails } from "./downloadableEmulators"; import { DatabaseEmulator } from "./databaseEmulator"; import { StorageEmulator } from "./storage"; +import * as rimraf from "rimraf"; export interface FirestoreExportMetadata { version: string; @@ -45,7 +46,11 @@ export interface ExportMetadata { export class HubExport { static METADATA_FILE_NAME = "firebase-export-metadata.json"; - constructor(private projectId: string, private exportPath: string) {} + private tmpDir: string; + + constructor(private projectId: string, private exportPath: string) { + this.tmpDir = fs.mkdtempSync(`firebase-export-${new Date().getTime()}`); + } public static readMetadata(exportPath: string): ExportMetadata | undefined { const metadataPath = path.join(exportPath, this.METADATA_FILE_NAME); @@ -102,8 +107,20 @@ export class HubExport { await this.exportStorage(metadata); } - const metadataPath = path.join(this.exportPath, HubExport.METADATA_FILE_NAME); + // Make sure the export directory exists + if (!fs.existsSync(this.exportPath)) { + 
fs.mkdirSync(this.exportPath); + } + + // Write the metadata file after everything else has succeeded + const metadataPath = path.join(this.tmpDir, HubExport.METADATA_FILE_NAME); fs.writeFileSync(metadataPath, JSON.stringify(metadata, undefined, 2)); + + // Remove any existing data in the directory and then swap it with the + // temp directory. + logger.debug(`hubExport: swapping ${this.tmpDir} with ${this.exportPath}`); + rimraf.sync(this.exportPath); + fse.moveSync(this.tmpDir, this.exportPath); } private async exportFirestore(metadata: ExportMetadata): Promise { @@ -112,7 +129,7 @@ export class HubExport { const firestoreExportBody = { database: `projects/${this.projectId}/databases/(default)`, - export_directory: this.exportPath, + export_directory: this.tmpDir, export_name: metadata.firestore!!.path, }; @@ -155,12 +172,7 @@ export class HubExport { } } - // Make sure the export directory exists - if (!fs.existsSync(this.exportPath)) { - fs.mkdirSync(this.exportPath); - } - - const dbExportPath = path.join(this.exportPath, metadata.database!.path); + const dbExportPath = path.join(this.tmpDir, metadata.database!.path); if (!fs.existsSync(dbExportPath)) { fs.mkdirSync(dbExportPath); } @@ -185,7 +197,7 @@ export class HubExport { private async exportAuth(metadata: ExportMetadata): Promise { const { host, port } = EmulatorRegistry.get(Emulators.AUTH)!.getInfo(); - const authExportPath = path.join(this.exportPath, metadata.auth!.path); + const authExportPath = path.join(this.tmpDir, metadata.auth!.path); if (!fs.existsSync(authExportPath)) { fs.mkdirSync(authExportPath); } @@ -221,7 +233,7 @@ export class HubExport { const storageEmulator = EmulatorRegistry.get(Emulators.STORAGE) as StorageEmulator; // Clear the export - const storageExportPath = path.join(this.exportPath, metadata.storage!.path); + const storageExportPath = path.join(this.tmpDir, metadata.storage!.path); if (fs.existsSync(storageExportPath)) { fse.removeSync(storageExportPath); } diff --git a/src/emulator/portUtils.ts b/src/emulator/portUtils.ts index e9a6a6a6278..d911c054541 100644 --- a/src/emulator/portUtils.ts +++ b/src/emulator/portUtils.ts @@ -109,7 +109,7 @@ export function suggestUnrestricted(port: number): number { export async function findAvailablePort( host: string, start: number, - avoidRestricted: boolean = true + avoidRestricted = true ): Promise { const openPort = await pf.getPortPromise({ host, port: start }); @@ -139,7 +139,7 @@ export async function checkPortOpen(port: number, host: string): Promise { const interval = 250; - const timeout = 30000; + const timeout = 60000; try { await tcpport.waitUntilUsedOnHost(port, host, interval, timeout); } catch (e) { diff --git a/src/emulator/storage/apis/firebase.ts b/src/emulator/storage/apis/firebase.ts index fcb93132497..d1c1f9034b0 100644 --- a/src/emulator/storage/apis/firebase.ts +++ b/src/emulator/storage/apis/firebase.ts @@ -201,6 +201,10 @@ export function createFirebaseEndpoints(emulator: StorageEmulator): Router { return; } + if (!md.downloadTokens.length) { + md.addDownloadToken(); + } + res.json(new OutgoingFirebaseMetadata(md)); }); diff --git a/src/emulator/storage/metadata.ts b/src/emulator/storage/metadata.ts index 31edbffc78e..50a7015f15e 100644 --- a/src/emulator/storage/metadata.ts +++ b/src/emulator/storage/metadata.ts @@ -65,7 +65,7 @@ export class StoredFileMetadata { this.customTime = opts.customTime; this.contentEncoding = opts.contentEncoding || "identity"; this.customMetadata = opts.customMetadata; - this.downloadTokens = []; + 
this.downloadTokens = opts.downloadTokens || []; // Special handling for date fields this.timeCreated = opts.timeCreated ? new Date(opts.timeCreated) : new Date(); @@ -132,10 +132,15 @@ export class StoredFileMetadata { } private setDownloadTokensFromCustomMetadata() { - if (!this.customMetadata) return; + if (!this.customMetadata) { + return; + } if (this.customMetadata.firebaseStorageDownloadTokens) { - this.downloadTokens = this.customMetadata.firebaseStorageDownloadTokens.split(","); + this.downloadTokens = [ + ...this.downloadTokens, + ...this.customMetadata.firebaseStorageDownloadTokens.split(","), + ]; delete this.customMetadata.firebaseStorageDownloadTokens; } } diff --git a/src/emulator/storage/rules/runtime.ts b/src/emulator/storage/rules/runtime.ts index 24d00b95220..b153bca85b5 100644 --- a/src/emulator/storage/rules/runtime.ts +++ b/src/emulator/storage/rules/runtime.ts @@ -21,7 +21,11 @@ import * as utils from "../../../utils"; import { Constants } from "../../constants"; import { downloadEmulator } from "../../download"; import * as fs from "fs-extra"; -import { DownloadDetails, _getCommand } from "../../downloadableEmulators"; +import { + _getCommand, + DownloadDetails, + handleEmulatorProcessError, +} from "../../downloadableEmulators"; export interface RulesetVerificationOpts { file: { @@ -142,16 +146,22 @@ export class StorageRulesRuntime { }; }); + // This catches error when spawning the java process + this._childprocess.on("error", (err) => { + handleEmulatorProcessError(Emulators.STORAGE, err); + }); + + // This catches errors from the java process (i.e. missing jar file) this._childprocess.stderr.on("data", (buf: Buffer) => { const error = buf.toString(); - if (error.includes("Invalid or corrupt jarfile")) { + if (error.includes("jarfile")) { throw new FirebaseError( "There was an issue starting the rules emulator, please run 'firebase setup:emulators:storage` again" ); } else { EmulatorLogger.forEmulator(Emulators.STORAGE).log( "WARN", - `Unexpected rules runtime output: ${buf.toString()}` + `Unexpected rules runtime error: ${buf.toString()}` ); } }); diff --git a/src/extensions/emulator/optionsHelper.ts b/src/extensions/emulator/optionsHelper.ts index d5dd80c6db4..8e41d1991aa 100644 --- a/src/extensions/emulator/optionsHelper.ts +++ b/src/extensions/emulator/optionsHelper.ts @@ -130,7 +130,7 @@ function buildConfig( } } - if (config.get("functions")) { + if (config.src.functions) { // Switch functions source to what is provided in the extension.yaml // to match the behavior of deployed extensions. const sourceDirectory = getFunctionSourceDirectory(functionResources); diff --git a/src/extensions/extensionsHelper.ts b/src/extensions/extensionsHelper.ts index 22dbacff4f0..37fc50b8b38 100644 --- a/src/extensions/extensionsHelper.ts +++ b/src/extensions/extensionsHelper.ts @@ -41,6 +41,7 @@ export enum SpecParamType { SELECT = "select", MULTISELECT = "multiSelect", STRING = "string", + SELECTRESOURCE = "selectResource", } export enum SourceOrigin { @@ -302,6 +303,15 @@ export function validateSpec(spec: any) { } } } + if (param.type && param.type == SpecParamType.SELECTRESOURCE) { + if (!param.resourceType) { + errors.push( + `Param${param.param ? 
` ${param.param}` : ""} must have resourceType because it is type ${ + param.type + }` + ); + } + } } if (errors.length) { const formatted = errors.map((error) => ` - ${error}`); diff --git a/src/extensions/provisioningHelper.ts b/src/extensions/provisioningHelper.ts new file mode 100644 index 00000000000..38f2753b4a8 --- /dev/null +++ b/src/extensions/provisioningHelper.ts @@ -0,0 +1,116 @@ +import * as marked from "marked"; + +import * as extensionsApi from "./extensionsApi"; +import * as api from "../api"; +import { FirebaseError } from "../error"; + +/** Product for which provisioning can be (or is) deferred */ +export enum DeferredProduct { + STORAGE, + AUTH, +} + +/** + * Checks whether products used by the extension require provisioning. + * + * @param spec extension spec + */ +export async function checkProductsProvisioned( + projectId: string, + spec: extensionsApi.ExtensionSpec +): Promise { + const usedProducts = getUsedProducts(spec); + const needProvisioning = [] as DeferredProduct[]; + let isStorageProvisionedPromise; + let isAuthProvisionedPromise; + if (usedProducts.includes(DeferredProduct.STORAGE)) { + isStorageProvisionedPromise = isStorageProvisioned(projectId); + } + if (usedProducts.includes(DeferredProduct.AUTH)) { + isAuthProvisionedPromise = isAuthProvisioned(projectId); + } + + if (isStorageProvisionedPromise && !(await isStorageProvisionedPromise)) { + needProvisioning.push(DeferredProduct.STORAGE); + } + if (isAuthProvisionedPromise && !(await isAuthProvisionedPromise)) { + needProvisioning.push(DeferredProduct.AUTH); + } + + if (needProvisioning.length > 0) { + let errorMessage = + "Some services used by this extension have not been set up on your " + + "Firebase project. To ensure this extension works as intended, you must enable these " + + "services by following the provided links, then retry installing the extension\n\n"; + if (needProvisioning.includes(DeferredProduct.STORAGE)) { + errorMessage += + " - Firebase Storage: store and retrieve user-generated files like images, audio, and " + + "video without server-side code.\n"; + errorMessage += ` https://console.firebase.google.com/project/${projectId}/storage`; + errorMessage += "\n"; + } + if (needProvisioning.includes(DeferredProduct.AUTH)) { + errorMessage += + " - Firebase Authentication: authenticate and manage users from a variety of providers " + + "without server-side code.\n"; + errorMessage += ` https://console.firebase.google.com/project/${projectId}/authentication/users`; + } + throw new FirebaseError(marked(errorMessage), { exit: 2 }); + } +} + +/** + * From the spec determines which products are used by the extension and + * returns the list. 
+ */ +export function getUsedProducts(spec: extensionsApi.ExtensionSpec): DeferredProduct[] { + const usedProducts: DeferredProduct[] = []; + const usedApis = spec.apis?.map((api) => api.apiName); + const usedRoles = spec.roles?.map((r) => r.role.split(".")[0]); + const usedTriggers = spec.resources.map((r) => getTriggerType(r.propertiesYaml)); + if ( + usedApis?.includes("storage-component.googleapis.com") || + usedRoles?.includes("storage") || + usedTriggers.find((t) => t?.startsWith("google.storage.")) + ) { + usedProducts.push(DeferredProduct.STORAGE); + } + if ( + usedApis?.includes("identitytoolkit.googleapis.com") || + usedRoles?.includes("firebaseauth") || + usedTriggers.find((t) => t?.startsWith("providers/firebase.auth/")) + ) { + usedProducts.push(DeferredProduct.AUTH); + } + return usedProducts; +} + +/** + * Parses out trigger eventType from the propertiesYaml. + */ +function getTriggerType(propertiesYaml: string | undefined) { + return propertiesYaml?.match(/eventType:\ ([\S]+)/)?.[1]; +} + +async function isStorageProvisioned(projectId: string): Promise { + const resp = await api.request("GET", `/v1beta/projects/${projectId}/buckets`, { + auth: true, + origin: api.firebaseStorageOrigin, + }); + return !!resp.body?.buckets?.find((bucket: any) => { + const bucketResourceName = bucket.name; + // Bucket resource name looks like: projects/PROJECT_NUMBER/buckets/BUCKET_NAME + // and we just need the BUCKET_NAME part. + const bucketResourceNameTokens = bucketResourceName.split("/"); + const pattern = "^" + projectId + "(.[[a-z0-9]+)*.appspot.com$"; + return new RegExp(pattern).test(bucketResourceNameTokens[bucketResourceNameTokens.length - 1]); + }); +} + +async function isAuthProvisioned(projectId: string): Promise { + const resp = await api.request("GET", `/v1/projects/${projectId}/products`, { + auth: true, + origin: api.firedataOrigin, + }); + return !!resp.body?.activation?.map((a: any) => a.service).includes("FIREBASE_AUTH"); +} diff --git a/src/extensions/updateHelper.ts b/src/extensions/updateHelper.ts index 4cdfb13daa1..4fad0bf50b5 100644 --- a/src/extensions/updateHelper.ts +++ b/src/extensions/updateHelper.ts @@ -52,16 +52,11 @@ export async function getExistingSourceOrigin( const registryEntry = await resolveSource.resolveRegistryEntry(extensionName); if (resolveSource.isOfficialSource(registryEntry, existingSource)) { existingSourceOrigin = SourceOrigin.OFFICIAL_EXTENSION; - } else { - existingSourceOrigin = SourceOrigin.PUBLISHED_EXTENSION; - } - } catch { - // If registry entry does not exist, assume existing source was from local directory or URL. - if (urlRegex.test(existingSource)) { - existingSourceOrigin = SourceOrigin.URL; } else { existingSourceOrigin = SourceOrigin.LOCAL; } + } catch { + existingSourceOrigin = SourceOrigin.LOCAL; } return existingSourceOrigin; } diff --git a/src/firebaseConfig.ts b/src/firebaseConfig.ts index 7b1399f917a..facef237550 100644 --- a/src/firebaseConfig.ts +++ b/src/firebaseConfig.ts @@ -1,18 +1,27 @@ +// +// NOTE: +// The contents of this file are used to generate the JSON Schema documents in +// the schema/ directory. After changing this file you will need to run +// 'npm run generate:json-schema' to regenerate the schema files. 
+// + export type DatabaseConfig = | { - rules: string; + rules?: string; } | { - target: string; + target?: string; + instance?: string; rules: string; }[]; export type FirestoreConfig = { - rules: string; - indexes: string; + rules?: string; + indexes?: string; }; export type FunctionsConfig = { + // TODO: Add types for "backend" and "runtime" source?: string; ignore?: string[]; predeploy?: string[]; @@ -68,32 +77,44 @@ export type RemoteConfigConfig = { export type EmulatorsConfig = { auth?: { host?: string; - port?: string; + port?: number; }; database?: { host?: string; - port?: string; + port?: number; }; firestore?: { host?: string; - port?: string; + port?: number; + }; + functions?: { + host?: string; + port?: number; }; hosting?: { host?: string; - port?: string; + port?: number; }; pubsub?: { host?: string; - port?: string; + port?: number; }; storage?: { host?: string; - port?: string; + port?: number; + }; + logging?: { + host?: string; + port?: number; + }; + hub?: { + host?: string; + port?: number; }; ui?: { enabled?: boolean; host?: string; - port?: string; + port?: number | string; }; }; @@ -104,5 +125,5 @@ export type FirebaseConfig = { hosting?: HostingConfig; storage?: StorageConfig; remoteconfig?: RemoteConfigConfig; - emulators: EmulatorsConfig; + emulators?: EmulatorsConfig; }; diff --git a/src/firebaseConfigValidate.ts b/src/firebaseConfigValidate.ts new file mode 100644 index 00000000000..a2ed3291e24 --- /dev/null +++ b/src/firebaseConfigValidate.ts @@ -0,0 +1,43 @@ +// Note: we are using ajv version 6.x because it's compatible with TypeScript +// 3.x, if we upgrade the TS version in this project we can upgrade ajv as well. +import { ValidateFunction, ErrorObject } from "ajv"; +import * as fs from "fs"; +import * as path from "path"; + +const Ajv = require("ajv"); + +const ajv = new Ajv(); +let _VALIDATOR: ValidateFunction | undefined = undefined; + +/** + * Lazily load the 'schema/firebase-config.json' file and return an AJV validation + * function. By doing this lazily we don't impose this I/O cost on those using + * the CLI as a Node module. 
+ */ +export function getValidator(): ValidateFunction { + if (!_VALIDATOR) { + const schemaStr = fs.readFileSync( + path.resolve(__dirname, "../schema/firebase-config.json"), + "UTF-8" + ); + const schema = JSON.parse(schemaStr); + + _VALIDATOR = ajv.compile(schema); + } + + return _VALIDATOR!; +} + +export function getErrorMessage(e: ErrorObject) { + if (e.keyword === "additionalProperties") { + return `Object "${e.dataPath}" in "firebase.json" has unknown property: ${JSON.stringify( + e.params + )}`; + } else if (e.keyword === "required") { + return `Object "${ + e.dataPath + }" in "firebase.json" is missing required property: ${JSON.stringify(e.params)}`; + } else { + return `Field "${e.dataPath}" in "firebase.json" is possibly invalid: ${e.message}`; + } +} diff --git a/src/functionsShellCommandAction.ts b/src/functionsShellCommandAction.ts index b54aa601e55..7f7dd859857 100644 --- a/src/functionsShellCommandAction.ts +++ b/src/functionsShellCommandAction.ts @@ -15,11 +15,12 @@ import { EMULATORS_SUPPORTED_BY_FUNCTIONS, EmulatorInfo, Emulators } from "./emu import { EmulatorHubClient } from "./emulator/hubClient"; import { Constants } from "./emulator/constants"; import { findAvailablePort } from "./emulator/portUtils"; +import { Options } from "./options"; const serveFunctions = new FunctionsServer(); -export const actionFunction = async (options: any) => { - if (options.port) { +export const actionFunction = async (options: Options) => { + if (typeof options.port === "string") { options.port = parseInt(options.port, 10); } @@ -28,6 +29,7 @@ export const actionFunction = async (options: any) => { debugPort = commandUtils.parseInspectionPort(options); } + utils.assertDefined(options.project); const hubClient = new EmulatorHubClient(options.project); let remoteEmulators: Record = {}; @@ -52,7 +54,7 @@ export const actionFunction = async (options: any) => { } else if (!options.port) { // If the user did not pass in any port and the functions emulator is not already running, we can // use the port defined for the Functions emulator in their firebase.json - options.port = options.config.get(Constants.getPortKey(Emulators.FUNCTIONS), undefined); + options.port = options.config.src.emulators?.functions?.port; } // If the port was not set by the --port flag or determined from 'firebase.json', just scan diff --git a/src/gcp/docker.ts b/src/gcp/docker.ts new file mode 100644 index 00000000000..935bb095132 --- /dev/null +++ b/src/gcp/docker.ts @@ -0,0 +1,88 @@ +// Note: unlike Google APIs, the documentation for the GCR API is +// actually the Docker REST API. This can be found at +// https://docs.docker.com/registry/spec/api/ +// This API is _very_ complex in its entirety and is very subtle (e.g. tags and digests +// are both strings and can both be put in the same route to get completely different +// response document types). +// This file will only implement a minimal subset as needed. +import { FirebaseError } from "../error"; +import * as api from "../apiv2"; + +// A Digest is a string in the format :. For example: +// sha256:146d8c9dff0344fb01417ef28673ed196e38215f3c94837ae733d3b064ba439e +export type Digest = string; +export type Tag = string; + +export interface Tags { + name: string; + tags: string[]; + + // These fields are not documented in the Docker API but are + // present in the GCR API. 
+ manifest: Record; + child: string[]; +} + +export interface ImageInfo { + // times are string milliseconds + timeCreatedMs: string; + timeUploadedMs: string; + tag: string[]; + mediaType: string; + imageSizeBytes: string; + layerId: string; +} + +interface ErrorsResponse { + errors?: { + code: string; + message: string; + details: unknown; + }[]; +} + +function isErrors(response: unknown): response is ErrorsResponse { + return Object.prototype.hasOwnProperty.call(response, "errors"); +} + +const API_VERSION = "v2"; + +export class Client { + readonly client: api.Client; + + constructor(origin: string) { + this.client = new api.Client({ + apiVersion: API_VERSION, + auth: true, + urlPrefix: origin, + }); + } + + async listTags(path: string): Promise { + const response = await this.client.get(`${path}/tags/list`); + if (isErrors(response.body)) { + throw new FirebaseError(`Failed to list GCR tags at ${path}`, { + children: response.body.errors, + }); + } + return response.body; + } + + async deleteTag(path: string, tag: Tag): Promise { + const response = await this.client.delete(`${path}/manifests/${tag}`); + if (response.body.errors?.length != 0) { + throw new FirebaseError(`Failed to delete tag ${tag} at path ${path}`, { + children: response.body.errors, + }); + } + } + + async deleteImage(path: string, digest: Digest): Promise { + const response = await this.client.delete(`${path}/manifests/${digest}`); + if (response.body.errors?.length != 0) { + throw new FirebaseError(`Failed to delete image ${digest} at path ${path}`, { + children: response.body.errors, + }); + } + } +} diff --git a/src/init/features/database.ts b/src/init/features/database.ts index 0471c0228c7..1d68259cb11 100644 --- a/src/init/features/database.ts +++ b/src/init/features/database.ts @@ -19,7 +19,7 @@ import { getDefaultDatabaseInstance } from "../../getDefaultDatabaseInstance"; import { FirebaseError } from "../../error"; interface DatabaseSetup { - projectId: string; + projectId?: string; instance?: string; config?: DatabaseSetupConfig; } @@ -71,6 +71,7 @@ async function createDefaultDatabaseInstance(project: string): Promise { - setup.config = {}; - await ensure(setup.projectId, "firebasedatabase.googleapis.com", "database", false); - logger.info(); - setup.instance = - setup.instance || (await getDefaultDatabaseInstance({ project: setup.projectId })); + setup.config = setup.config || {}; + let instanceDetails; - if (setup.instance !== "") { - instanceDetails = await getDatabaseInstanceDetails(setup.projectId, setup.instance); - } else { - const confirm = await promptOnce({ - type: "confirm", - name: "confirm", - default: true, - message: - "It seems like you haven’t initialized Realtime Database in your project yet. Do you want to set it up?", - }); - if (confirm) { - instanceDetails = await createDefaultDatabaseInstance(setup.projectId); + if (setup.projectId) { + await ensure(setup.projectId, "firebasedatabase.googleapis.com", "database", false); + logger.info(); + setup.instance = + setup.instance || (await getDefaultDatabaseInstance({ project: setup.projectId })); + if (setup.instance !== "") { + instanceDetails = await getDatabaseInstanceDetails(setup.projectId, setup.instance); + } else { + const confirm = await promptOnce({ + type: "confirm", + name: "confirm", + default: true, + message: + "It seems like you haven’t initialized Realtime Database in your project yet. 
Do you want to set it up?", + }); + if (confirm) { + instanceDetails = await createDefaultDatabaseInstance(setup.projectId); + } } } @@ -165,13 +169,15 @@ export async function doSetup(setup: DatabaseSetup, config: Config): Promise} */ - var enableApis = Promise.resolve(); - if (projectId) { - enableApis = requirePermissions({ ...options, project: projectId }).then(() => { - return Promise.all([ - enableApi(projectId, "cloudfunctions.googleapis.com"), - enableApi(projectId, "runtimeconfig.googleapis.com"), - ]); - }); - } - return enableApis.then(function () { - return prompt(setup.functions, [ - { - type: "list", - name: "language", - message: "What language would you like to use to write Cloud Functions?", - default: "javascript", - choices: [ - { - name: "JavaScript", - value: "javascript", - }, - { - name: "TypeScript", - value: "typescript", - }, - ], - }, - ]).then(function () { - return require("./" + setup.functions.language)(setup, config); - }); - }); -}; diff --git a/src/init/features/functions/index.ts b/src/init/features/functions/index.ts new file mode 100644 index 00000000000..eca63d2cbf9 --- /dev/null +++ b/src/init/features/functions/index.ts @@ -0,0 +1,52 @@ +import * as clc from "cli-color"; + +import { logger } from "../../../logger"; +import { promptOnce } from "../../../prompt"; +import { requirePermissions } from "../../../requirePermissions"; +import { previews } from "../../../previews"; +import { Options } from "../../../options"; +import * as ensureApiEnabled from "../../../ensureApiEnabled"; + +module.exports = async function (setup: any, config: any, options: Options) { + logger.info(); + logger.info( + "A " + clc.bold("functions") + " directory will be created in your project with sample code" + ); + logger.info( + "pre-configured. Functions can be deployed with " + clc.bold("firebase deploy") + "." 
+ ); + logger.info(); + + setup.functions = {}; + const projectId = setup?.rcfile?.projects?.default; + if (projectId) { + await requirePermissions({ ...options, project: projectId }); + await Promise.all([ + ensureApiEnabled.enable(projectId, "cloudfunctions.googleapis.com"), + ensureApiEnabled.enable(projectId, "runtimeconfig.googleapis.com"), + ]); + } + const choices = [ + { + name: "JavaScript", + value: "javascript", + }, + { + name: "TypeScript", + value: "typescript", + }, + ]; + if (previews.golang) { + choices.push({ + name: "Go", + value: "golang", + }); + } + const language = await promptOnce({ + type: "list", + message: "What language would you like to use to write Cloud Functions?", + default: "javascript", + choices, + }); + return require("./" + language)(setup, config); +}; diff --git a/src/init/features/storage.ts b/src/init/features/storage.ts index 096e8f9d984..0db674109c5 100644 --- a/src/init/features/storage.ts +++ b/src/init/features/storage.ts @@ -27,5 +27,5 @@ export async function doSetup(setup: any, config: any): Promise { default: "storage.rules", }); setup.config.storage.rules = storageRulesFile; - config.writeProjectFile(setup.config.storage.rules, RULES_TEMPLATE); + await config.askWriteProjectFile(setup.config.storage.rules, RULES_TEMPLATE); } diff --git a/src/management/database.ts b/src/management/database.ts index 509be437d45..c96b3007457 100644 --- a/src/management/database.ts +++ b/src/management/database.ts @@ -29,6 +29,7 @@ export enum DatabaseInstanceState { export enum DatabaseLocation { US_CENTRAL1 = "us-central1", EUROPE_WEST1 = "europe-west1", + ASIA_SOUTHEAST1 = "asia-southeast1", ANY = "-", } @@ -206,15 +207,17 @@ export function parseDatabaseLocation( return defaultLocation; } switch (location.toLowerCase()) { - case "europe-west1": - return DatabaseLocation.EUROPE_WEST1; case "us-central1": return DatabaseLocation.US_CENTRAL1; + case "europe-west1": + return DatabaseLocation.EUROPE_WEST1; + case "asia-southeast1": + return DatabaseLocation.ASIA_SOUTHEAST1; case "": return defaultLocation; default: throw new FirebaseError( - `Unexpected location value: ${location}. Only us-central1, and europe-west1 locations are supported` + `Unexpected location value: ${location}. Only us-central1, europe-west1, and asia-southeast1 locations are supported` ); } } diff --git a/src/options.ts b/src/options.ts index 1ad4a0c3c48..1ff547c543d 100644 --- a/src/options.ts +++ b/src/options.ts @@ -5,13 +5,19 @@ import { Config } from "./config"; export interface Options { cwd: string; configPath: string; - // OMITTED: project. 
Use context.projectId instead only: string; config: Config; filteredTargets: string[]; - nonInteractive: boolean; force: boolean; + // Options which are present on every command + project?: string; + account?: string; + json: boolean; + nonInteractive: boolean; + interactive: boolean; + debug: boolean; + // TODO(samstern): Remove this once options is better typed [key: string]: unknown; } diff --git a/src/previews.ts b/src/previews.ts index d4ad0036a70..69128035642 100644 --- a/src/previews.ts +++ b/src/previews.ts @@ -7,6 +7,7 @@ interface PreviewFlags { extdev: boolean; rtdbmanagement: boolean; functionsv2: boolean; + golang: boolean; } export const previews: PreviewFlags = { @@ -16,6 +17,7 @@ export const previews: PreviewFlags = { extdev: false, rtdbmanagement: false, functionsv2: false, + golang: false, ...configstore.get("previews"), }; diff --git a/src/serve/functions.ts b/src/serve/functions.ts index cafd1967a50..6fa3bffd42a 100644 --- a/src/serve/functions.ts +++ b/src/serve/functions.ts @@ -4,6 +4,9 @@ import { EmulatorServer } from "../emulator/emulatorServer"; import { parseRuntimeVersion } from "../emulator/functionsEmulatorUtils"; import * as getProjectId from "../getProjectId"; import { getProjectDefaultAccount } from "../auth"; +import { Options } from "../options"; +import { Config } from "../config"; +import * as utils from "../utils"; // TODO(samstern): It would be better to convert this to an EmulatorServer // but we don't have the "options" object until start() is called. @@ -16,12 +19,15 @@ export class FunctionsServer { } } - async start(options: any, partialArgs: Partial): Promise { + async start(options: Options, partialArgs: Partial): Promise { const projectId = getProjectId(options, false); - const functionsDir = path.join( - options.config.projectDir, - options.config.get("functions.source") + utils.assertDefined(options.config.src.functions); + utils.assertDefined( + options.config.src.functions.source, + "Error: 'functions.source' is not defined" ); + + const functionsDir = path.join(options.config.projectDir, options.config.src.functions.source); const account = getProjectDefaultAccount(options.config.projectDir); const nodeMajorVersion = parseRuntimeVersion(options.config.get("functions.runtime")); @@ -37,6 +43,7 @@ export class FunctionsServer { }; if (options.host) { + utils.assertIsStringOrUndefined(options.host); args.host = options.host; } @@ -44,11 +51,14 @@ export class FunctionsServer { // we can use the port argument. Otherwise it goes to hosting and // we use port + 1. 
if (options.port) { - const hostingRunning = options.targets && options.targets.indexOf("hosting") >= 0; + utils.assertIsNumber(options.port); + const targets = options.targets as string[] | undefined; + const port = options.port; + const hostingRunning = targets && targets.indexOf("hosting") >= 0; if (hostingRunning) { - args.port = options.port + 1; + args.port = port + 1; } else { - args.port = options.port; + args.port = port; } } diff --git a/src/test/config.spec.js b/src/test/config.spec.js index 9febcb7fa34..fc6f3a29210 100644 --- a/src/test/config.spec.js +++ b/src/test/config.spec.js @@ -24,36 +24,36 @@ describe("Config", function () { }); }); - describe("#_parseFile", function () { + describe("#parseFile", function () { it("should load a cjson file", function () { var config = new Config({}, { cwd: _fixtureDir("config-imports") }); - expect(config._parseFile("hosting", "hosting.json").public).to.equal("."); + expect(config.parseFile("hosting", "hosting.json").public).to.equal("."); }); it("should error out for an unknown file", function () { var config = new Config({}, { cwd: _fixtureDir("config-imports") }); expect(function () { - config._parseFile("hosting", "i-dont-exist.json"); + config.parseFile("hosting", "i-dont-exist.json"); }).to.throw("Imported file i-dont-exist.json does not exist"); }); it("should error out for an unrecognized extension", function () { var config = new Config({}, { cwd: _fixtureDir("config-imports") }); expect(function () { - config._parseFile("hosting", "unsupported.txt"); + config.parseFile("hosting", "unsupported.txt"); }).to.throw("unsupported.txt is not of a supported config file type"); }); }); - describe("#_materialize", function () { + describe("#materialize", function () { it("should assign unaltered if an object is found", function () { var config = new Config({ example: { foo: "bar" } }, {}); - expect(config._materialize("example").foo).to.equal("bar"); + expect(config.materialize("example").foo).to.equal("bar"); }); it("should prevent top-level key duplication", function () { var config = new Config({ rules: "rules.json" }, { cwd: _fixtureDir("dup-top-level") }); - expect(config._materialize("rules")).to.deep.equal({ ".read": true }); + expect(config.materialize("rules")).to.deep.equal({ ".read": true }); }); }); }); diff --git a/src/test/deploy/functions/containerCleaner.spec.ts b/src/test/deploy/functions/containerCleaner.spec.ts new file mode 100644 index 00000000000..d43ff1e98b9 --- /dev/null +++ b/src/test/deploy/functions/containerCleaner.spec.ts @@ -0,0 +1,211 @@ +import { expect } from "chai"; +import _ from "lodash"; +import * as sinon from "sinon"; + +import * as backend from "../../../deploy/functions/backend"; +import * as containerCleaner from "../../../deploy/functions/containerCleaner"; +import * as docker from "../../../gcp/docker"; + +describe("DockerHelper", () => { + let listTags: sinon.SinonStub; + let deleteTag: sinon.SinonStub; + let deleteImage: sinon.SinonStub; + let helper: containerCleaner.DockerHelper; + + before(() => { + helper = new containerCleaner.DockerHelper("us"); + listTags = sinon.stub(helper.client, "listTags").rejects("Unexpected call"); + deleteTag = sinon.stub(helper.client, "deleteTag").rejects("Unexpected call"); + deleteImage = sinon.stub(helper.client, "deleteImage").rejects("Unexpected call"); + }); + + after(() => { + sinon.verifyAndRestore(); + }); + + const FOO_BAR: docker.Tags = { + name: "foo/bar", + tags: ["tag1", "tag2"], + manifest: { + "sha256:hash1": {} as any, + "sha256:hash2": {} as 
any, + }, + child: ["baz"], + }; + + const FOO_BAR_BAZ: docker.Tags = { + name: "foo/bar/baz", + tags: ["tag3"], + manifest: { + "sha256:hash3": {} as any, + }, + child: [], + }; + + it("Fetches tags with caching", async () => { + listTags.withArgs("foo/bar").resolves(FOO_BAR); + + await expect(helper.ls("foo/bar")).to.eventually.deep.equal({ + digests: ["sha256:hash1", "sha256:hash2"], + tags: ["tag1", "tag2"], + children: ["baz"], + }); + + await expect(helper.ls("foo/bar")).to.eventually.deep.equal({ + digests: ["sha256:hash1", "sha256:hash2"], + tags: ["tag1", "tag2"], + children: ["baz"], + }); + + // This also verifies that we haven't called at "/foo" to ls "/foo/bar" + expect(listTags).to.have.been.calledOnce; + }); + + it("Deletes recursively", async () => { + listTags.withArgs("foo/bar").resolves(FOO_BAR); + listTags.withArgs("foo/bar/baz").resolves(FOO_BAR_BAZ); + + const remainingTags: Record = { + "foo/bar": ["tag1", "tag2"], + "foo/bar/baz": ["tag3"], + }; + deleteTag.callsFake((path: string, tag: string) => { + if (!remainingTags[path].includes(tag)) { + throw new Error("Cannot remove tag twice"); + } + remainingTags[path].splice(remainingTags[path].indexOf(tag), 1); + }); + deleteImage.callsFake((path: string, digest: string) => { + if (remainingTags[path].length) { + throw new Error("Cannot remove image while tags still pin it"); + } + }); + + await helper.rm("foo/bar"); + + expect(listTags).to.have.been.calledTwice; + expect(listTags).to.have.been.calledWith("foo/bar"); + expect(listTags).to.have.been.calledWith("foo/bar/baz"); + + expect(deleteTag).to.have.been.calledThrice; + expect(deleteTag).to.have.been.calledWith("foo/bar/baz", "tag3"); + expect(deleteTag).to.have.been.calledWith("foo/bar", "tag1"); + expect(deleteTag).to.have.been.calledWith("foo/bar", "tag2"); + + expect(deleteImage).to.have.been.calledThrice; + expect(deleteImage).to.have.been.calledWith("foo/bar/baz", "sha256:hash3"); + expect(deleteImage).to.have.been.calledWith("foo/bar", "sha256:hash1"); + expect(deleteImage).to.have.been.calledWith("foo/bar", "sha256:hash2"); + + await expect(helper.ls("foo/bar")).to.eventually.deep.equal({ + digests: [], + tags: [], + children: [], + }); + await expect(helper.ls("foo/bar/baz")).to.eventually.deep.equal({ + digests: [], + tags: [], + children: [], + }); + }); +}); + +describe("ContainerRegistryCleaner", () => { + const FUNCTION: backend.FunctionSpec = { + apiVersion: 1, + project: "project", + region: "us-central1", + id: "id", + entryPoint: "function", + runtime: "nodejs14", + trigger: { + allowInsecure: false, + }, + }; + + // The first function in a region has subdirectories "cache/" and "worker/" in it. + it("Handles cleanup of first function in the region", async () => { + const cleaner = new containerCleaner.ContainerRegistryCleaner(); + + // Any cast because the stub apparently isn't stubbing getNode as a private member. + // This shouldn't blow up because the public methods are stubbed anyway. 
+ const stub = sinon.createStubInstance(containerCleaner.DockerHelper); + cleaner.helpers["us"] = stub as any; + + stub.ls.withArgs("project/gcf/us-central1").returns( + Promise.resolve({ + children: ["uuid"], + digests: [], + tags: [], + }) + ); + stub.ls.withArgs("project/gcf/us-central1/uuid").returns( + Promise.resolve({ + children: ["cache", "worker"], + digests: ["sha256:func-hash"], + tags: ["id_version-1"], + }) + ); + + await cleaner.cleanupFunction(FUNCTION); + + expect(stub.rm).to.have.been.calledOnceWith("project/gcf/us-central1/uuid"); + }); + + // The second function of the region doesn't have subdirectories + it("Handles cleanup of second function in the region", async () => { + const cleaner = new containerCleaner.ContainerRegistryCleaner(); + + // Any cast because the stub apparently isn't stubbing getNode as a priavte member. + // This shouldn't blow up because the public methods are stubbed anyway. + const stub = sinon.createStubInstance(containerCleaner.DockerHelper); + cleaner.helpers["us"] = stub as any; + + stub.ls.withArgs("project/gcf/us-central1").returns( + Promise.resolve({ + children: ["uuid"], + digests: [], + tags: [], + }) + ); + stub.ls.withArgs("project/gcf/us-central1/uuid").returns( + Promise.resolve({ + children: [], + digests: ["sha256:func-hash"], + tags: ["id_version-1"], + }) + ); + + await cleaner.cleanupFunction(FUNCTION); + + expect(stub.rm).to.have.been.calledOnceWith("project/gcf/us-central1/uuid"); + }); + + it("Leaves other directories alone", async () => { + const cleaner = new containerCleaner.ContainerRegistryCleaner(); + + // Any cast because the stub apparently isn't stubbing getNode as a priavte member. + // This shouldn't blow up because the public methods are stubbed anyway. + const stub = sinon.createStubInstance(containerCleaner.DockerHelper); + cleaner.helpers["us"] = stub as any; + + stub.ls.withArgs("project/gcf/us-central1").returns( + Promise.resolve({ + children: ["uuid"], + digests: [], + tags: [], + }) + ); + stub.ls.withArgs("project/gcf/us-central1/uuid").returns( + Promise.resolve({ + children: [], + digests: ["sha256:func-hash"], + tags: ["other-function_version-1"], + }) + ); + + await cleaner.cleanupFunction(FUNCTION); + + expect(stub.rm).to.not.have.been.called; + }); +}); diff --git a/src/test/deploy/functions/prompts.spec.ts b/src/test/deploy/functions/prompts.spec.ts index c6e36b3f3c7..a3f1613da21 100644 --- a/src/test/deploy/functions/prompts.spec.ts +++ b/src/test/deploy/functions/prompts.spec.ts @@ -35,6 +35,9 @@ const SAMPLE_OPTIONS: Options = { config: {} as any, only: "functions", nonInteractive: false, + json: false, + interactive: false, + debug: false, force: false, filteredTargets: ["functions"], }; diff --git a/src/test/emulators/storage/files.spec.ts b/src/test/emulators/storage/files.spec.ts index 00eb288d4c8..511fea8e5f0 100644 --- a/src/test/emulators/storage/files.spec.ts +++ b/src/test/emulators/storage/files.spec.ts @@ -10,6 +10,10 @@ describe("files", () => { name: "name", bucket: "bucket", contentType: "mime/type", + downloadTokens: ["token123"], + customMetadata: { + foo: "bar", + }, }, cf, Buffer.from("Hello, World!") diff --git a/src/test/extensions/extensionsHelper.spec.ts b/src/test/extensions/extensionsHelper.spec.ts index c7582c562a2..2d06b35a89d 100644 --- a/src/test/extensions/extensionsHelper.spec.ts +++ b/src/test/extensions/extensionsHelper.spec.ts @@ -590,6 +590,26 @@ describe("extensionsHelper", () => { extensionsHelper.validateSpec(testSpec); }).to.throw(FirebaseError, /default/); 
}); + it("should error if a param selectResource missing resourceType.", () => { + const testSpec = { + version: "0.1.0", + specVersion: "v1beta", + params: [ + { + type: extensionsHelper.SpecParamType.SELECTRESOURCE, + validationRegex: "test", + default: "fail", + }, + ], + resources: [], + sourceUrl: "https://test-source.fake", + license: "apache-2.0", + }; + + expect(() => { + extensionsHelper.validateSpec(testSpec); + }).to.throw(FirebaseError, /must have resourceType/); + }); }); describe("promptForValidInstanceId", () => { diff --git a/src/test/extensions/provisioningHelper.spec.ts b/src/test/extensions/provisioningHelper.spec.ts new file mode 100644 index 00000000000..928d242575b --- /dev/null +++ b/src/test/extensions/provisioningHelper.spec.ts @@ -0,0 +1,207 @@ +import * as nock from "nock"; +import { expect } from "chai"; + +import * as api from "../../api"; +import * as provisioningHelper from "../../extensions/provisioningHelper"; +import * as extensionsApi from "../../extensions/extensionsApi"; +import { FirebaseError } from "../../error"; + +const TEST_INSTANCES_RESPONSE = {}; +const PROJECT_ID = "test-project"; +const SPEC_WITH_STORAGE_AND_AUTH = { + apis: [ + { + apiName: "storage-component.googleapis.com", + }, + { + apiName: "identitytoolkit.googleapis.com", + }, + ] as extensionsApi.Api[], + resources: [] as extensionsApi.Resource[], +} as extensionsApi.ExtensionSpec; + +const FIREDATA_AUTH_ACTIVATED_RESPONSE = { + activation: [ + { + service: "FIREBASE_AUTH", + }, + ], +}; + +const FIREBASE_STORAGE_DEFAULT_BUCKET_LINKED_RESPONSE = { + buckets: [ + { + name: `projects/12345/bucket/${PROJECT_ID}.appspot.com`, + }, + ], +}; + +describe("provisioningHelper", () => { + afterEach(() => { + nock.cleanAll(); + }); + + describe("getUsedProducts", () => { + let testSpec: extensionsApi.ExtensionSpec; + + beforeEach(() => { + testSpec = { + apis: [ + { + apiName: "unrelated.googleapis.com", + }, + ] as extensionsApi.Api[], + roles: [ + { + role: "unrelated.role", + }, + ] as extensionsApi.Role[], + resources: [ + { + propertiesYaml: + "availableMemoryMb: 1024\neventTrigger:\n eventType: providers/unrelates.service/eventTypes/something.do\n resource: projects/_/buckets/${param:IMG_BUCKET}\nlocation: ${param:LOCATION}\nruntime: nodejs10\n", + }, + ] as extensionsApi.Resource[], + } as extensionsApi.ExtensionSpec; + }); + + it("returns empty array when nothing is used", () => { + expect(provisioningHelper.getUsedProducts(testSpec)).to.be.empty; + }); + + it("returns STORAGE when Storage API is used", () => { + testSpec.apis?.push({ + apiName: "storage-component.googleapis.com", + reason: "whatever", + }); + expect(provisioningHelper.getUsedProducts(testSpec)).to.be.deep.eq([ + provisioningHelper.DeferredProduct.STORAGE, + ]); + }); + + it("returns STORAGE when Storage Role is used", () => { + testSpec.roles?.push({ + role: "storage.object.admin", + reason: "whatever", + }); + expect(provisioningHelper.getUsedProducts(testSpec)).to.be.deep.eq([ + provisioningHelper.DeferredProduct.STORAGE, + ]); + }); + + it("returns STORAGE when Storage trigger is used", () => { + testSpec.resources?.push({ + propertiesYaml: + "availableMemoryMb: 1024\neventTrigger:\n eventType: google.storage.object.finalize\n resource: projects/_/buckets/${param:IMG_BUCKET}\nlocation: ${param:LOCATION}\nruntime: nodejs10\n", + } as extensionsApi.Resource); + expect(provisioningHelper.getUsedProducts(testSpec)).to.be.deep.eq([ + provisioningHelper.DeferredProduct.STORAGE, + ]); + }); + + it("returns AUTH when 
+      testSpec.apis?.push({
+        apiName: "identitytoolkit.googleapis.com",
+        reason: "whatever",
+      });
+      expect(provisioningHelper.getUsedProducts(testSpec)).to.be.deep.eq([
+        provisioningHelper.DeferredProduct.AUTH,
+      ]);
+    });
+
+    it("returns AUTH when Authentication Role is used", () => {
+      testSpec.roles?.push({
+        role: "firebaseauth.user.admin",
+        reason: "whatever",
+      });
+      expect(provisioningHelper.getUsedProducts(testSpec)).to.be.deep.eq([
+        provisioningHelper.DeferredProduct.AUTH,
+      ]);
+    });
+
+    it("returns AUTH when Auth trigger is used", () => {
+      testSpec.resources?.push({
+        propertiesYaml:
+          "availableMemoryMb: 1024\neventTrigger:\n eventType: providers/firebase.auth/eventTypes/user.create\n resource: projects/_/buckets/${param:IMG_BUCKET}\nlocation: ${param:LOCATION}\nruntime: nodejs10\n",
+      } as extensionsApi.Resource);
+      expect(provisioningHelper.getUsedProducts(testSpec)).to.be.deep.eq([
+        provisioningHelper.DeferredProduct.AUTH,
+      ]);
+    });
+  });
+
+  describe("checkProductsProvisioned", () => {
+    it("passes provisioning check status when nothing is used", async () => {
+      await expect(
+        provisioningHelper.checkProductsProvisioned(PROJECT_ID, {
+          resources: [] as extensionsApi.Resource[],
+        } as extensionsApi.ExtensionSpec)
+      ).to.be.fulfilled;
+    });
+
+    it("passes provisioning check when all is provisioned", async () => {
+      nock(api.firedataOrigin)
+        .get(`/v1/projects/${PROJECT_ID}/products`)
+        .reply(200, FIREDATA_AUTH_ACTIVATED_RESPONSE);
+      nock(api.firebaseStorageOrigin)
+        .get(`/v1beta/projects/${PROJECT_ID}/buckets`)
+        .reply(200, FIREBASE_STORAGE_DEFAULT_BUCKET_LINKED_RESPONSE);
+
+      await expect(
+        provisioningHelper.checkProductsProvisioned(PROJECT_ID, SPEC_WITH_STORAGE_AND_AUTH)
+      ).to.be.fulfilled;
+
+      expect(nock.isDone()).to.be.true;
+    });
+
+    it("fails provisioning check storage when default bucket is not linked", async () => {
+      nock(api.firedataOrigin)
+        .get(`/v1/projects/${PROJECT_ID}/products`)
+        .reply(200, FIREDATA_AUTH_ACTIVATED_RESPONSE);
+      nock(api.firebaseStorageOrigin)
+        .get(`/v1beta/projects/${PROJECT_ID}/buckets`)
+        .reply(200, {
+          buckets: [
+            {
+              name: `projects/12345/bucket/some-other-bucket`,
+            },
+          ],
+        });
+
+      await expect(
+        provisioningHelper.checkProductsProvisioned(PROJECT_ID, SPEC_WITH_STORAGE_AND_AUTH)
+      ).to.be.rejectedWith(FirebaseError, "Firebase Storage: store and retrieve user-generated");
+
+      expect(nock.isDone()).to.be.true;
+    });
+
+    it("fails provisioning check storage when no firebase storage buckets", async () => {
+      nock(api.firedataOrigin)
+        .get(`/v1/projects/${PROJECT_ID}/products`)
+        .reply(200, FIREDATA_AUTH_ACTIVATED_RESPONSE);
+      nock(api.firebaseStorageOrigin).get(`/v1beta/projects/${PROJECT_ID}/buckets`).reply(200, {});
+
+      await expect(
+        provisioningHelper.checkProductsProvisioned(PROJECT_ID, SPEC_WITH_STORAGE_AND_AUTH)
+      ).to.be.rejectedWith(FirebaseError, "Firebase Storage: store and retrieve user-generated");
+
+      expect(nock.isDone()).to.be.true;
+    });
+
+    it("fails provisioning check when Auth is not provisioned", async () => {
+      nock(api.firedataOrigin).get(`/v1/projects/${PROJECT_ID}/products`).reply(200, {});
+      nock(api.firebaseStorageOrigin)
+        .get(`/v1beta/projects/${PROJECT_ID}/buckets`)
+        .reply(200, FIREBASE_STORAGE_DEFAULT_BUCKET_LINKED_RESPONSE);
+
+      await expect(
+        provisioningHelper.checkProductsProvisioned(PROJECT_ID, SPEC_WITH_STORAGE_AND_AUTH)
+      ).to.be.rejectedWith(
+        FirebaseError,
+        "Firebase Authentication: authenticate and manage users from"
+      );
+
+      expect(nock.isDone()).to.be.true;
+    });
+  });
+});
diff --git a/src/test/extensions/updateHelper.spec.ts b/src/test/extensions/updateHelper.spec.ts
index 96292d0dbb8..03a8e8a9c5b 100644
--- a/src/test/extensions/updateHelper.spec.ts
+++ b/src/test/extensions/updateHelper.spec.ts
@@ -109,7 +109,7 @@ const INSTANCE = {
   state: "ACTIVE",
   config: {
     name:
-      "projects/invader-zim/instances/image-resizer/configurations/95355951-397f-4821-a5c2-9c9788b2cc63",
+      "projects/invader-zim/instances/instance-of-official-ext/configurations/95355951-397f-4821-a5c2-9c9788b2cc63",
     createTime: "2019-05-19T00:20:10.416947Z",
     sourceId: "fake-official-source",
     sourceName: "projects/firebasemods/sources/fake-official-source",
@@ -120,22 +120,40 @@ const INSTANCE = {
 };
 
 const REGISTRY_INSTANCE = {
-  name: "projects/invader-zim/instances/fake-official-instance",
+  name: "projects/invader-zim/instances/instance-of-registry-ext",
   createTime: "2019-05-19T00:20:10.416947Z",
   updateTime: "2019-05-19T00:20:10.416947Z",
   state: "ACTIVE",
   config: {
     name:
-      "projects/invader-zim/instances/image-resizer/configurations/95355951-397f-4821-a5c2-9c9788b2cc63",
+      "projects/invader-zim/instances/instance-of-registry-ext/configurations/95355951-397f-4821-a5c2-9c9788b2cc63",
     createTime: "2019-05-19T00:20:10.416947Z",
     sourceId: "fake-registry-source",
     sourceName: "projects/firebasemods/sources/fake-registry-source",
+    extensionRef: "test-publisher/test",
     source: {
       name: "projects/firebasemods/sources/fake-registry-source",
     },
   },
 };
 
+const LOCAL_INSTANCE = {
+  name: "projects/invader-zim/instances/instance-of-local-ext",
+  createTime: "2019-05-19T00:20:10.416947Z",
+  updateTime: "2019-05-19T00:20:10.416947Z",
+  state: "ACTIVE",
+  config: {
+    name:
+      "projects/invader-zim/instances/instance-of-local-ext/configurations/95355951-397f-4821-a5c2-9c9788b2cc63",
+    createTime: "2019-05-19T00:20:10.416947Z",
+    sourceId: "fake-registry-source",
+    sourceName: "projects/firebasemods/sources/fake-local-source",
+    source: {
+      name: "projects/firebasemods/sources/fake-local-source",
+    },
+  },
+};
+
 describe("updateHelper", () => {
   describe("updateFromLocalSource", () => {
     let promptStub: sinon.SinonStub;
@@ -563,3 +581,79 @@ describe("inferUpdateSource", () => {
     expect(result).to.equal("notfirebase/storage-resize-images@latest");
   });
 });
+
+describe("getExistingSourceOrigin", () => {
+  let registryEntryStub: sinon.SinonStub;
+  let isOfficialStub: sinon.SinonStub;
+  let getInstanceStub: sinon.SinonStub;
+
+  afterEach(() => {
+    registryEntryStub.restore();
+    isOfficialStub.restore();
+    getInstanceStub.restore();
+  });
+
+  it("should return official extension as source origin", async () => {
+    registryEntryStub = sinon.stub(resolveSource, "resolveRegistryEntry");
+    registryEntryStub.resolves(REGISTRY_ENTRY);
+    isOfficialStub = sinon.stub(resolveSource, "isOfficialSource");
+    isOfficialStub.returns(true);
+    getInstanceStub = sinon.stub(extensionsApi, "getInstance").resolves(INSTANCE);
+
+    const result = await updateHelper.getExistingSourceOrigin(
+      "invader-zim",
+      "instance-of-official-ext",
+      "ext-testing",
+      "projects/firebasemods/sources/fake-official-source"
+    );
+
+    expect(result).to.equal(extensionsHelper.SourceOrigin.OFFICIAL_EXTENSION);
+  });
+
+  it("should return published extension as source origin", async () => {
+    registryEntryStub = sinon.stub(resolveSource, "resolveRegistryEntry");
+    registryEntryStub.throwsException("Entry not found");
+    getInstanceStub = sinon.stub(extensionsApi, "getInstance").resolves(REGISTRY_INSTANCE);
+
+    const result = await updateHelper.getExistingSourceOrigin(
+      "invader-zim",
+      "instance-of-registry-ext",
+      "ext-testing",
+      "projects/firebasemods/sources/fake-registry-source"
+    );
+
+    expect(result).to.equal(extensionsHelper.SourceOrigin.PUBLISHED_EXTENSION);
+  });
+
+  it("should return local extension as source origin", async () => {
+    registryEntryStub = sinon.stub(resolveSource, "resolveRegistryEntry");
+    registryEntryStub.throwsException("Entry not found");
+    getInstanceStub = sinon.stub(extensionsApi, "getInstance").resolves(LOCAL_INSTANCE);
+
+    const result = await updateHelper.getExistingSourceOrigin(
+      "invader-zim",
+      "instance-of-local-ext",
+      "ext-testing",
+      "projects/firebasemods/sources/fake-local-source"
+    );
+
+    expect(result).to.equal(extensionsHelper.SourceOrigin.LOCAL);
+  });
+
+  it("should return local extension as source origin when the registry entry is not official", async () => {
+    registryEntryStub = sinon.stub(resolveSource, "resolveRegistryEntry");
+    registryEntryStub.resolves(REGISTRY_ENTRY);
+    isOfficialStub = sinon.stub(resolveSource, "isOfficialSource");
+    isOfficialStub.returns(false);
+    getInstanceStub = sinon.stub(extensionsApi, "getInstance").resolves(LOCAL_INSTANCE);
+
+    const result = await updateHelper.getExistingSourceOrigin(
+      "invader-zim",
+      "instance-of-local-ext",
+      "ext-testing",
+      "projects/firebasemods/sources/fake-local-source"
+    );
+
+    expect(result).to.equal(extensionsHelper.SourceOrigin.LOCAL);
+  });
+});
diff --git a/src/test/firebaseConfigValidate.spec.ts b/src/test/firebaseConfigValidate.spec.ts
new file mode 100644
index 00000000000..66ba6bffe74
--- /dev/null
+++ b/src/test/firebaseConfigValidate.spec.ts
@@ -0,0 +1,115 @@
+import { expect } from "chai";
+import { getValidator } from "../firebaseConfigValidate";
+import { FirebaseConfig } from "../firebaseConfig";
+import { valid } from "semver";
+
+describe("firebaseConfigValidate", () => {
+  it("should accept a basic, valid config", () => {
+    const config: FirebaseConfig = {
+      database: {
+        rules: "myrules.json",
+      },
+      hosting: {
+        public: "public",
+      },
+      emulators: {
+        database: {
+          port: 8080,
+        },
+      },
+    };
+
+    const validator = getValidator();
+    const isValid = validator(config);
+
+    expect(isValid).to.be.true;
+  });
+
+  it("should report an extra top-level field", () => {
+    // This config has an extra 'bananas' top-level property
+    const config = {
+      database: {
+        rules: "myrules.json",
+      },
+      bananas: {},
+    };
+
+    const validator = getValidator();
+    const isValid = validator(config);
+
+    expect(isValid).to.be.false;
+    expect(validator.errors).to.exist;
+    expect(validator.errors!.length).to.eq(1);
+
+    const firstError = validator.errors![0];
+    expect(firstError.keyword).to.eq("additionalProperties");
+    expect(firstError.dataPath).to.eq("");
+    expect(firstError.params).to.deep.equal({ additionalProperty: "bananas" });
+  });
+
+  it("should report a missing required field", () => {
+    // This config is missing 'storage.rules'
+    const config = {
+      storage: {},
+    };
+
+    const validator = getValidator();
+    const isValid = validator(config);
+
+    expect(isValid).to.be.false;
+    expect(validator.errors).to.exist;
+    expect(validator.errors!.length).to.eq(3);
+
+    const [firstError, secondError, thirdError] = validator.errors!;
+
+    // Missing required param
+    expect(firstError.keyword).to.eq("required");
+    expect(firstError.dataPath).to.eq(".storage");
+    expect(firstError.params).to.deep.equal({ missingProperty: "rules" });
+
+    // Because it doesn't match the object type, we also get an "is not an array"
+    // error since JSON Schema can't tell which type it is closest to.
+    expect(secondError.keyword).to.eq("type");
+    expect(secondError.dataPath).to.eq(".storage");
+    expect(secondError.params).to.deep.equal({ type: "array" });
+
+    // Finally we get an error saying that 'storage' is not any of the known types
+    expect(thirdError.keyword).to.eq("anyOf");
+    expect(thirdError.dataPath).to.eq(".storage");
+    expect(thirdError.params).to.deep.equal({});
+  });
+
+  it("should report a field with an incorrect type", () => {
+    // This config has a number where it should have a string
+    const config = {
+      storage: {
+        rules: 1234,
+      },
+    };
+
+    const validator = getValidator();
+    const isValid = validator(config);
+
+    expect(isValid).to.be.false;
+    expect(validator.errors).to.exist;
+    expect(validator.errors!.length).to.eq(3);
+
+    const [firstError, secondError, thirdError] = validator.errors!;
+
+    // Wrong type
+    expect(firstError.keyword).to.eq("type");
+    expect(firstError.dataPath).to.eq(".storage.rules");
+    expect(firstError.params).to.deep.equal({ type: "string" });
+
+    // Because it doesn't match the object type, we also get an "is not an array"
+    // error since JSON Schema can't tell which type it is closest to.
+    expect(secondError.keyword).to.eq("type");
+    expect(secondError.dataPath).to.eq(".storage");
+    expect(secondError.params).to.deep.equal({ type: "array" });
+
+    // Finally we get an error saying that 'storage' is not any of the known types
+    expect(thirdError.keyword).to.eq("anyOf");
+    expect(thirdError.dataPath).to.eq(".storage");
+    expect(thirdError.params).to.deep.equal({});
+  });
+});
diff --git a/src/test/init/features/storage.spec.ts b/src/test/init/features/storage.spec.ts
index 09c302eacf8..5c2ddd11442 100644
--- a/src/test/init/features/storage.spec.ts
+++ b/src/test/init/features/storage.spec.ts
@@ -9,11 +9,11 @@ import * as prompt from "../../../prompt";
 
 describe("storage", () => {
   const sandbox: sinon.SinonSandbox = sinon.createSandbox();
-  let writeProjectFileStub: sinon.SinonStub;
+  let askWriteProjectFileStub: sinon.SinonStub;
   let promptStub: sinon.SinonStub;
 
   beforeEach(() => {
-    writeProjectFileStub = sandbox.stub(Config.prototype, "writeProjectFile");
+    askWriteProjectFileStub = sandbox.stub(Config.prototype, "askWriteProjectFile");
     promptStub = sandbox.stub(prompt, "promptOnce");
   });
 
@@ -30,7 +30,7 @@ describe("storage", () => {
       projectLocation: "us-central",
     };
     promptStub.returns("storage.rules");
-    writeProjectFileStub.resolves();
+    askWriteProjectFileStub.resolves();
 
     await doSetup(setup, new Config("/path/to/src", {}));
 
diff --git a/src/utils.ts b/src/utils.ts
index 9cdb941d189..5bd6b96eec2 100644
--- a/src/utils.ts
+++ b/src/utils.ts
@@ -7,6 +7,7 @@ import * as process from "process";
 import { Readable } from "stream";
 import * as winston from "winston";
 import { SPLAT } from "triple-beam";
+import { AssertionError } from "assert";
 const ansiStrip = require("cli-color/strip") as (input: string) => string;
 
 import { configstore } from "./configstore";
@@ -499,3 +500,42 @@ export function isRunningInWSL(): boolean {
 export function thirtyDaysFromNow(): Date {
   return new Date(Date.now() + THIRTY_DAYS_IN_MILLISECONDS);
 }
+
+/**
+ * See:
+ * https://www.typescriptlang.org/docs/handbook/release-notes/typescript-3-7.html#assertion-functions
+ */
+export function assertDefined<T>(val: T, message?: string): asserts val is NonNullable<T> {
+  if (val === undefined || val === null) {
+    throw new AssertionError({
+      message: message || `expected value to be defined but got "${val}"`,
+    });
+  }
+}
+
+export function assertIsString(val: any, message?: string): asserts val is string {
+  if (typeof val !== "string") {
+    throw new AssertionError({
+      message: message || `expected "string" but got "${typeof val}"`,
+    });
+  }
+}
+
+export function assertIsNumber(val: any, message?: string): asserts val is number {
+  if (typeof val !== "number") {
+    throw new AssertionError({
+      message: message || `expected "number" but got "${typeof val}"`,
+    });
+  }
+}
+
+export function assertIsStringOrUndefined(
+  val: any,
+  message?: string
+): asserts val is string | undefined {
+  if (!(val === undefined || typeof val === "string")) {
+    throw new AssertionError({
+      message: message || `expected "string" or "undefined" but got "${typeof val}"`,
+    });
+  }
+}
diff --git a/templates/init/functions/golang/_gitignore b/templates/init/functions/golang/_gitignore
new file mode 100644
index 00000000000..f2dd9554a12
--- /dev/null
+++ b/templates/init/functions/golang/_gitignore
@@ -0,0 +1,12 @@
+# Binaries for programs and plugins
+*.exe
+*.exe~
+*.dll
+*.so
+*.dylib
+
+# Test binary, built with `go test -c`
+*.test
+
+# Output of the go coverage tool, specifically when used with LiteIDE
+*.out
diff --git a/templates/init/functions/golang/functions.go b/templates/init/functions/golang/functions.go
new file mode 100644
index 00000000000..1596778ff14
--- /dev/null
+++ b/templates/init/functions/golang/functions.go
@@ -0,0 +1,38 @@
+package PACKAGE
+
+// Welcome to Cloud Functions for Firebase for Golang!
+// To get started, simply uncomment the below code or create your own.
+// Deploy with `firebase deploy`
+
+/*
+import (
+	"context"
+	"fmt"
+
+	"github.com/inlined/go-functions/https"
+	"github.com/inlined/go-functions/pubsub"
+	"github.com/inlined/go-functions/runwith"
+)
+
+var HelloWorld = https.Function{
+	RunWith: https.Options{
+		AvailableMemoryMB: 256,
+	},
+	Callback: func(w https.ResponseWriter, req *https.Request) {
+		fmt.Println("Hello, world!")
+		fmt.Fprintf(w, "Hello, world!\n")
+	},
+}
+
+var PubSubFunction = pubsub.Function{
+	EventType: pubsub.MessagePublished,
+	Topic: "topic",
+	RunWith: runwith.Options{
+		AvailableMemoryMB: 256,
+	},
+	Callback: func(ctx context.Context, message pubsub.Message) error {
+		fmt.Printf("Got Pub/Sub event %+v", message)
+		return nil
+	},
+}
+*/