diff --git a/.eslintrc.json b/.eslintrc.json index 5c40701f..56443bd7 100644 --- a/.eslintrc.json +++ b/.eslintrc.json @@ -50,6 +50,7 @@ "import/no-extraneous-dependencies": ["error", { "devDependencies": true }], - "import/no-cycle": "off" + "import/no-cycle": "off", + "linebreak-style": "off" } } diff --git a/.gitignore b/.gitignore index 2fd83a6a..621d577e 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,4 @@ .idea node_modules dist +coverage diff --git a/config/jest.coverage.ts b/config/jest.coverage.ts new file mode 100644 index 00000000..475e2333 --- /dev/null +++ b/config/jest.coverage.ts @@ -0,0 +1,17 @@ +import * as path from 'path'; +import config from './jest'; + +/* + * For a detailed explanation regarding each configuration property and type check, visit: + * https://jestjs.io/docs/configuration + */ + +export default { + ...config, + collectCoverage: true, + collectCoverageFrom: [ + '**/*.{js,ts}', + '!generated/**', + ], + coverageDirectory: '../coverage', +}; diff --git a/config/jest.ts b/config/jest.ts index 425f69f0..3da03eb2 100644 --- a/config/jest.ts +++ b/config/jest.ts @@ -10,6 +10,7 @@ export default { 'js', 'ts', ], + modulePathIgnorePatterns: ['./src/generated'], preset: 'ts-jest', rootDir: path.resolve('./src/'), transform: { diff --git a/package-lock.json b/package-lock.json index 8d9a1dd5..ccbed3c2 100644 --- a/package-lock.json +++ b/package-lock.json @@ -46,6 +46,7 @@ "grpc-tools": "^1.11.2", "husky": "^7.0.4", "jest": "^27.4.5", + "pre-commit": "^1.2.2", "semantic-release": "^21.0.1", "ts-jest": "^27.1.1", "ts-node": "^10.4.0", @@ -3235,14 +3236,24 @@ } }, "node_modules/caniuse-lite": { - "version": "1.0.30001286", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001286.tgz", - "integrity": "sha512-zaEMRH6xg8ESMi2eQ3R4eZ5qw/hJiVsO/HlLwniIwErij0JDr9P+8V4dtx1l+kLq6j3yy8l8W4fst1lBnat5wQ==", + "version": "1.0.30001520", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001520.tgz", + "integrity": "sha512-tahF5O9EiiTzwTUqAeFjIZbn4Dnqxzz7ktrgGlMYNLH43Ul26IgTMH/zvL3DG0lZxBYnlT04axvInszUsZULdA==", "dev": true, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/browserslist" - } + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/caniuse-lite" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ] }, "node_modules/cardinal": { "version": "2.1.1", @@ -3427,6 +3438,51 @@ "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=", "dev": true }, + "node_modules/concat-stream": { + "version": "1.6.2", + "resolved": "https://registry.npmjs.org/concat-stream/-/concat-stream-1.6.2.tgz", + "integrity": "sha512-27HBghJxjiZtIk3Ycvn/4kbJk/1uZuJFfuPEns6LaEvpvG1f0hTea8lilrouyo9mVc2GWdcEZ8OLoGmSADlrCw==", + "dev": true, + "engines": [ + "node >= 0.8" + ], + "dependencies": { + "buffer-from": "^1.0.0", + "inherits": "^2.0.3", + "readable-stream": "^2.2.2", + "typedarray": "^0.0.6" + } + }, + "node_modules/concat-stream/node_modules/readable-stream": { + "version": "2.3.8", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz", + "integrity": "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==", + "dev": true, + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": 
"~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "node_modules/concat-stream/node_modules/safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", + "dev": true + }, + "node_modules/concat-stream/node_modules/string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "dev": true, + "dependencies": { + "safe-buffer": "~5.1.0" + } + }, "node_modules/config-chain": { "version": "1.1.13", "resolved": "https://registry.npmjs.org/config-chain/-/config-chain-1.1.13.tgz", @@ -11110,6 +11166,15 @@ "node": ">= 0.8.0" } }, + "node_modules/os-shim": { + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/os-shim/-/os-shim-0.1.3.tgz", + "integrity": "sha512-jd0cvB8qQ5uVt0lvCIexBaROw1KyKm5sbulg2fWOHjETisuCzWyt+eTZKEMs8v6HwzoGs8xik26jg7eCM6pS+A==", + "dev": true, + "engines": { + "node": ">= 0.4.0" + } + }, "node_modules/p-each-series": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/p-each-series/-/p-each-series-3.0.0.tgz", @@ -11425,6 +11490,78 @@ "node": ">=4" } }, + "node_modules/pre-commit": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/pre-commit/-/pre-commit-1.2.2.tgz", + "integrity": "sha512-qokTiqxD6GjODy5ETAIgzsRgnBWWQHQH2ghy86PU7mIn/wuWeTwF3otyNQZxWBwVn8XNr8Tdzj/QfUXpH+gRZA==", + "dev": true, + "hasInstallScript": true, + "dependencies": { + "cross-spawn": "^5.0.1", + "spawn-sync": "^1.0.15", + "which": "1.2.x" + } + }, + "node_modules/pre-commit/node_modules/cross-spawn": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-5.1.0.tgz", + "integrity": "sha512-pTgQJ5KC0d2hcY8eyL1IzlBPYjTkyH72XRZPnLyKus2mBfNjQs3klqbJU2VILqZryAZUt9JOb3h/mWMy23/f5A==", + "dev": true, + "dependencies": { + "lru-cache": "^4.0.1", + "shebang-command": "^1.2.0", + "which": "^1.2.9" + } + }, + "node_modules/pre-commit/node_modules/lru-cache": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-4.1.5.tgz", + "integrity": "sha512-sWZlbEP2OsHNkXrMl5GYk/jKk70MBng6UU4YI/qGDYbgf6YbP4EvmqISbXCoJiRKs+1bSpFHVgQxvJ17F2li5g==", + "dev": true, + "dependencies": { + "pseudomap": "^1.0.2", + "yallist": "^2.1.2" + } + }, + "node_modules/pre-commit/node_modules/shebang-command": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-1.2.0.tgz", + "integrity": "sha512-EV3L1+UQWGor21OmnvojK36mhg+TyIKDh3iFBKBohr5xeXIhNBcx8oWdgkTEEQ+BEFFYdLRuqMfd5L84N1V5Vg==", + "dev": true, + "dependencies": { + "shebang-regex": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/pre-commit/node_modules/shebang-regex": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-1.0.0.tgz", + "integrity": "sha512-wpoSFAxys6b2a2wHZ1XpDSgD7N9iVjg29Ph9uV/uaP9Ex/KXlkTZTeddxDPSYQpgvzKLGJke2UU0AzoGCjNIvQ==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/pre-commit/node_modules/which": { + "version": "1.2.14", + "resolved": "https://registry.npmjs.org/which/-/which-1.2.14.tgz", + "integrity": "sha512-16uPglFkRPzgiUXYMi1Jf8Z5EzN1iB4V0ZtMXcHZnwsBtQhhHeCqoWw7tsUY42hJGNDWtUsVLTjakIa5BgAxCw==", + "dev": true, + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + 
"which": "bin/which" + } + }, + "node_modules/pre-commit/node_modules/yallist": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-2.1.2.tgz", + "integrity": "sha512-ncTzHV7NvsQZkYe1DW7cbDLm0YpzHmZF5r/iyP3ZnQtMiJ+pjzisCiMNI+Sj+xQF5pXhSHxSB3uDbsBTzY/c2A==", + "dev": true + }, "node_modules/prelude-ls": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.1.2.tgz", @@ -11530,6 +11667,12 @@ "node": ">=12.0.0" } }, + "node_modules/pseudomap": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/pseudomap/-/pseudomap-1.0.2.tgz", + "integrity": "sha512-b/YwNhb8lk1Zz2+bXXpS/LK9OisiZZ1SNsSLxN1x2OXVEhW2Ckr/7mWE5vrC1ZTiJlD9g19jWszTmJsB+oEpFQ==", + "dev": true + }, "node_modules/psl": { "version": "1.8.0", "resolved": "https://registry.npmjs.org/psl/-/psl-1.8.0.tgz", @@ -12564,6 +12707,17 @@ "integrity": "sha1-Gv2Uc46ZmwNG17n8NzvlXgdXcCk=", "dev": true }, + "node_modules/spawn-sync": { + "version": "1.0.15", + "resolved": "https://registry.npmjs.org/spawn-sync/-/spawn-sync-1.0.15.tgz", + "integrity": "sha512-9DWBgrgYZzNghseho0JOuh+5fg9u6QWhAWa51QC7+U5rCheZ/j1DrEZnyE0RBBRqZ9uEXGPgSSM0nky6burpVw==", + "dev": true, + "hasInstallScript": true, + "dependencies": { + "concat-stream": "^1.4.7", + "os-shim": "^0.1.2" + } + }, "node_modules/spdx-correct": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.1.1.tgz", @@ -13326,6 +13480,12 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/typedarray": { + "version": "0.0.6", + "resolved": "https://registry.npmjs.org/typedarray/-/typedarray-0.0.6.tgz", + "integrity": "sha512-/aCDEGatGvZ2BIk+HmLf4ifCJFwvKFNb9/JeZPMulfgFracn9QFcAf5GO8B/mweUjSoblS5In0cWhqpfs/5PQA==", + "dev": true + }, "node_modules/typedarray-to-buffer": { "version": "3.1.5", "resolved": "https://registry.npmjs.org/typedarray-to-buffer/-/typedarray-to-buffer-3.1.5.tgz", @@ -16222,9 +16382,9 @@ } }, "caniuse-lite": { - "version": "1.0.30001286", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001286.tgz", - "integrity": "sha512-zaEMRH6xg8ESMi2eQ3R4eZ5qw/hJiVsO/HlLwniIwErij0JDr9P+8V4dtx1l+kLq6j3yy8l8W4fst1lBnat5wQ==", + "version": "1.0.30001520", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001520.tgz", + "integrity": "sha512-tahF5O9EiiTzwTUqAeFjIZbn4Dnqxzz7ktrgGlMYNLH43Ul26IgTMH/zvL3DG0lZxBYnlT04axvInszUsZULdA==", "dev": true }, "cardinal": { @@ -16370,6 +16530,50 @@ "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=", "dev": true }, + "concat-stream": { + "version": "1.6.2", + "resolved": "https://registry.npmjs.org/concat-stream/-/concat-stream-1.6.2.tgz", + "integrity": "sha512-27HBghJxjiZtIk3Ycvn/4kbJk/1uZuJFfuPEns6LaEvpvG1f0hTea8lilrouyo9mVc2GWdcEZ8OLoGmSADlrCw==", + "dev": true, + "requires": { + "buffer-from": "^1.0.0", + "inherits": "^2.0.3", + "readable-stream": "^2.2.2", + "typedarray": "^0.0.6" + }, + "dependencies": { + "readable-stream": { + "version": "2.3.8", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz", + "integrity": "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==", + "dev": true, + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "safe-buffer": { + "version": "5.1.2", + "resolved": 
"https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", + "dev": true + }, + "string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "dev": true, + "requires": { + "safe-buffer": "~5.1.0" + } + } + } + }, "config-chain": { "version": "1.1.13", "resolved": "https://registry.npmjs.org/config-chain/-/config-chain-1.1.13.tgz", @@ -22007,6 +22211,12 @@ "word-wrap": "~1.2.3" } }, + "os-shim": { + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/os-shim/-/os-shim-0.1.3.tgz", + "integrity": "sha512-jd0cvB8qQ5uVt0lvCIexBaROw1KyKm5sbulg2fWOHjETisuCzWyt+eTZKEMs8v6HwzoGs8xik26jg7eCM6pS+A==", + "dev": true + }, "p-each-series": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/p-each-series/-/p-each-series-3.0.0.tgz", @@ -22231,6 +22441,70 @@ "integrity": "sha512-Nc3IT5yHzflTfbjgqWcCPpo7DaKy4FnpB0l/zCAW0Tc7jxAiuqSxHasntB3D7887LSrA93kDJ9IXovxJYxyLCA==", "dev": true }, + "pre-commit": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/pre-commit/-/pre-commit-1.2.2.tgz", + "integrity": "sha512-qokTiqxD6GjODy5ETAIgzsRgnBWWQHQH2ghy86PU7mIn/wuWeTwF3otyNQZxWBwVn8XNr8Tdzj/QfUXpH+gRZA==", + "dev": true, + "requires": { + "cross-spawn": "^5.0.1", + "spawn-sync": "^1.0.15", + "which": "1.2.x" + }, + "dependencies": { + "cross-spawn": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-5.1.0.tgz", + "integrity": "sha512-pTgQJ5KC0d2hcY8eyL1IzlBPYjTkyH72XRZPnLyKus2mBfNjQs3klqbJU2VILqZryAZUt9JOb3h/mWMy23/f5A==", + "dev": true, + "requires": { + "lru-cache": "^4.0.1", + "shebang-command": "^1.2.0", + "which": "^1.2.9" + } + }, + "lru-cache": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-4.1.5.tgz", + "integrity": "sha512-sWZlbEP2OsHNkXrMl5GYk/jKk70MBng6UU4YI/qGDYbgf6YbP4EvmqISbXCoJiRKs+1bSpFHVgQxvJ17F2li5g==", + "dev": true, + "requires": { + "pseudomap": "^1.0.2", + "yallist": "^2.1.2" + } + }, + "shebang-command": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-1.2.0.tgz", + "integrity": "sha512-EV3L1+UQWGor21OmnvojK36mhg+TyIKDh3iFBKBohr5xeXIhNBcx8oWdgkTEEQ+BEFFYdLRuqMfd5L84N1V5Vg==", + "dev": true, + "requires": { + "shebang-regex": "^1.0.0" + } + }, + "shebang-regex": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-1.0.0.tgz", + "integrity": "sha512-wpoSFAxys6b2a2wHZ1XpDSgD7N9iVjg29Ph9uV/uaP9Ex/KXlkTZTeddxDPSYQpgvzKLGJke2UU0AzoGCjNIvQ==", + "dev": true + }, + "which": { + "version": "1.2.14", + "resolved": "https://registry.npmjs.org/which/-/which-1.2.14.tgz", + "integrity": "sha512-16uPglFkRPzgiUXYMi1Jf8Z5EzN1iB4V0ZtMXcHZnwsBtQhhHeCqoWw7tsUY42hJGNDWtUsVLTjakIa5BgAxCw==", + "dev": true, + "requires": { + "isexe": "^2.0.0" + } + }, + "yallist": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-2.1.2.tgz", + "integrity": "sha512-ncTzHV7NvsQZkYe1DW7cbDLm0YpzHmZF5r/iyP3ZnQtMiJ+pjzisCiMNI+Sj+xQF5pXhSHxSB3uDbsBTzY/c2A==", + "dev": true + } + } + }, "prelude-ls": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.1.2.tgz", @@ -22310,6 +22584,12 @@ "long": "^5.0.0" } }, + "pseudomap": { + "version": "1.0.2", + "resolved": 
"https://registry.npmjs.org/pseudomap/-/pseudomap-1.0.2.tgz", + "integrity": "sha512-b/YwNhb8lk1Zz2+bXXpS/LK9OisiZZ1SNsSLxN1x2OXVEhW2Ckr/7mWE5vrC1ZTiJlD9g19jWszTmJsB+oEpFQ==", + "dev": true + }, "psl": { "version": "1.8.0", "resolved": "https://registry.npmjs.org/psl/-/psl-1.8.0.tgz", @@ -23027,6 +23307,16 @@ "integrity": "sha1-Gv2Uc46ZmwNG17n8NzvlXgdXcCk=", "dev": true }, + "spawn-sync": { + "version": "1.0.15", + "resolved": "https://registry.npmjs.org/spawn-sync/-/spawn-sync-1.0.15.tgz", + "integrity": "sha512-9DWBgrgYZzNghseho0JOuh+5fg9u6QWhAWa51QC7+U5rCheZ/j1DrEZnyE0RBBRqZ9uEXGPgSSM0nky6burpVw==", + "dev": true, + "requires": { + "concat-stream": "^1.4.7", + "os-shim": "^0.1.2" + } + }, "spdx-correct": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.1.1.tgz", @@ -23620,6 +23910,12 @@ "integrity": "sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==", "dev": true }, + "typedarray": { + "version": "0.0.6", + "resolved": "https://registry.npmjs.org/typedarray/-/typedarray-0.0.6.tgz", + "integrity": "sha512-/aCDEGatGvZ2BIk+HmLf4ifCJFwvKFNb9/JeZPMulfgFracn9QFcAf5GO8B/mweUjSoblS5In0cWhqpfs/5PQA==", + "dev": true + }, "typedarray-to-buffer": { "version": "3.1.5", "resolved": "https://registry.npmjs.org/typedarray-to-buffer/-/typedarray-to-buffer-3.1.5.tgz", diff --git a/package.json b/package.json index 0a01db09..081ac2cb 100644 --- a/package.json +++ b/package.json @@ -66,8 +66,10 @@ "typescript": "^4.5.4" }, "scripts": { - "test": "cross-env NODE_OPTIONS=\"--max-old-space-size=4096\" jest -c config/jest.ts --passWithNoTests '.*\\.test\\.ts$'", + "test": "cross-env NODE_OPTIONS=\"--max-old-space-size=4096\" jest -c config/jest.ts", + "test:coverage": "cross-env NODE_OPTIONS=\"--max-old-space-size=4096\" jest -c config/jest.coverage.ts", "lint": "eslint src config", + "lint:fix": "eslint src config --fix", "build": "cross-env NODE_OPTIONS=\"--max-old-space-size=4096\" tsc -p .", "generate-code": "ts-node scripts/generate-code.ts", "prepare": "husky install", diff --git a/scripts/generate-code.ts b/scripts/generate-code.ts index 59cbaf89..8e71cea3 100644 --- a/scripts/generate-code.ts +++ b/scripts/generate-code.ts @@ -4,7 +4,7 @@ import * as path from 'path'; import * as fg from 'fast-glob'; import * as _ from 'lodash'; -import { logger } from '../src/utils/logger'; +import { logger } from '../src/utils/simple-logger'; import { servicesConfig } from './services'; const GENERATED_CODE_DIR = path.resolve('./src/generated'); diff --git a/src/__tests__/metadata-token-service/010.constructors.ts b/src/__tests__/metadata-token-service/010.constructors.ts new file mode 100644 index 00000000..f532e24c --- /dev/null +++ b/src/__tests__/metadata-token-service/010.constructors.ts @@ -0,0 +1,73 @@ +import { MetadataTokenService } from '../../token-service/metadata-token-service'; +import { buildTestLogger } from '../../utils/test-logger'; +import { + Messages, + DEFAULT_URL, + DEFAULT_OPTIONS, +} from '../../token-service/metadata-token-service.consts'; + +import Mock = jest.Mock; + +const { + testLogger, + testLoggerFn, +} = buildTestLogger(); + +beforeEach(() => { + jest.spyOn(MetadataTokenService.prototype, 'getToken'); + // so init sequenc whouldn't start in the constructor + (MetadataTokenService.prototype.getToken as Mock).mockReturnValue(Promise.resolve()); +}); + +afterEach(() => { + testLoggerFn.mockReset(); +}); + +describe('metadata-token-service.constructors', () => { + it('constructor: default', async () 
=> { + const metadataTokenService = new MetadataTokenService({ logger: testLogger }); + + expect(testLoggerFn.mock.calls) + .toEqual([ + ['debug', + Messages.debug_ctor, + DEFAULT_URL, + false, + DEFAULT_OPTIONS, + ]]); + }); + + for (const url of [undefined, 'AnURL']) { + for (const doUpdateTokenInBackground of [undefined, true, false]) { + // eslint-disable-next-line @typescript-eslint/no-loop-func + it(`constructor: ${url} ${doUpdateTokenInBackground}`, async () => { + const metadataTokenService = url + ? new MetadataTokenService(url, { + headers: {}, + doUpdateTokenInBackground, + logger: testLogger, + }) + : new MetadataTokenService({ + headers: {}, + doUpdateTokenInBackground, + logger: testLogger, + }); + + await metadataTokenService.destroy(); + + expect(testLoggerFn.mock.calls) + .toEqual([ + ['debug', + Messages.debug_ctor, + url === undefined + ? DEFAULT_URL + : url, + doUpdateTokenInBackground === undefined ? false : doUpdateTokenInBackground, + { headers: {} }, + ], ['trace', + Messages.trace_destroy, + ]]); + }); + } + } +}); diff --git a/src/__tests__/metadata-token-service/020.set-iam-response.ts b/src/__tests__/metadata-token-service/020.set-iam-response.ts new file mode 100644 index 00000000..b0538b95 --- /dev/null +++ b/src/__tests__/metadata-token-service/020.set-iam-response.ts @@ -0,0 +1,158 @@ +import { buildTestLogger } from '../../utils/test-logger'; +import { MetadataTokenService } from '../../token-service/metadata-token-service'; +import { FakeTimersFixture } from '../../utils/tests/fake-timers-fixture'; +import { HRInterval } from '../../utils/hr-interval'; +import IamGetTokenResponse = MetadataTokenService.IamToken; +import { + TOKEN_LIFETIME_LEFT_TO_REFRESH_PCT, + TOKEN_LIFETIME_LEFT_TO_REPORT_ERROR_PCT, + TOKEN_MINIMUM_LIFETIME_MARGIN_MS, + Messages, +} from '../../token-service/metadata-token-service.consts'; + +describe('MetadataTokenService.setIamResponse', () => { + const fakeTimersFixture = new FakeTimersFixture(); + + const { + testLogger, + testLoggerFn, + } = buildTestLogger(); + + let metadataTokenService: MetadataTokenService; + + beforeEach(() => { + fakeTimersFixture.setup(); + metadataTokenService = new MetadataTokenService({ logger: testLogger }); + testLoggerFn.mockReset(); + }); + + afterEach(() => { + testLoggerFn.mockReset(); + fakeTimersFixture.dispose(); + }); + + it('general', async () => { + const TTL = 10 * 60 * 60; // sec + + metadataTokenService.setIamToken({ + token_type: 'Bearer', + access_token: '123', + expires_in: TTL, + }); + + // @ts-ignore + expect((metadataTokenService as unknown).token) + .toBe('123'); + // @ts-ignore + expect((metadataTokenService as unknown).tokenExpiresAt) + .toBe(Date.now() + (TTL * 1000) - TOKEN_MINIMUM_LIFETIME_MARGIN_MS); + // @ts-ignore + expect((metadataTokenService as unknown).tokenRefreshAt) + .toBe(Date.now() + (TTL * 1000) * (1 - TOKEN_LIFETIME_LEFT_TO_REFRESH_PCT / 100)); + // @ts-ignore + expect((metadataTokenService as unknown).tokenStartReportTTLAt) + .toBe(Date.now() + (TTL * 1000) * (1 - TOKEN_LIFETIME_LEFT_TO_REPORT_ERROR_PCT / 100)); + + expect(testLoggerFn.mock.calls) + .toEqual([ + [ + 'trace', + Messages.trace_setIamResponse, + ], + [ + 'debug', + Messages.debug_new_token_was_received, + new HRInterval(36_000_000), + '', + ], + ]); + }); + + it('new token received after an error', async () => { + const TTL = 10 * 60 * 60; // sec + + // @ts-ignore + (metadataTokenService as unknown).tokenLastError = {}; + + metadataTokenService.setIamToken({ + token_type: 'Bearer', + access_token: 
'123', + expires_in: TTL, + }); + + expect(testLoggerFn.mock.calls) + .toEqual([ + [ + 'trace', + Messages.trace_setIamResponse, + ], + [ + 'info', // it's info, there was an error + Messages.debug_new_token_was_received, + new HRInterval(36_000_000), + '', + ], + ]); + }); + + it('token with TTL less than allowed', async () => { + expect(() => metadataTokenService.setIamToken({ + token_type: 'Bearer', + access_token: '123', + expires_in: TOKEN_MINIMUM_LIFETIME_MARGIN_MS / 1000 / 2, + })) + .toThrow(new Error('insufficient lifetime: 00:07:30')); + + expect(testLoggerFn.mock.calls) + .toEqual([ + [ + 'trace', + Messages.trace_setIamResponse, + ], + [ + 'debug', + Messages.debug_new_token_was_received, + new HRInterval(450_000), + ' (too small TTL)', + ], + ]); + }); + + it('invalid token structure', async () => { + expect(() => metadataTokenService.setIamToken(undefined as unknown as IamGetTokenResponse)) + .toThrow(new Error('invalid iam token: undefined')); + + expect(() => metadataTokenService.setIamToken(null as unknown as IamGetTokenResponse)) + .toThrow(new Error('invalid iam token: null')); + + expect(() => metadataTokenService.setIamToken({} as IamGetTokenResponse)) + .toThrow(new Error('invalid iam token: {}')); + + expect(() => metadataTokenService.setIamToken({ + token_type: 'Bearer', + expires_in: 'str', + } as unknown as IamGetTokenResponse)) + .toThrow(new Error('invalid iam token: { token_type: \'Bearer\', expires_in: \'str\' }')); + + expect(() => metadataTokenService.setIamToken({ + token_type: 'Bearer', + expires_in: -1, + } as unknown as IamGetTokenResponse)) + .toThrow(new Error('invalid iam token: { token_type: \'Bearer\', expires_in: -1 }')); + + expect(() => metadataTokenService.setIamToken({ + token_type: 'Bearer', + expires_in: 10 * 60 * 60, + access_token: 111, + } as unknown as IamGetTokenResponse)) + .toThrow(new Error('invalid iam token: { token_type: \'Bearer\', expires_in: 36000, access_token: 111 }')); + + expect(() => metadataTokenService.setIamToken({ + token_type: 'Bearer', + expires_in: 10 * 60 * 60, + access_token: '123', + } as unknown as IamGetTokenResponse)) + .not + .toThrow(); + }); +}); diff --git a/src/__tests__/metadata-token-service/030.fetch-token.ts b/src/__tests__/metadata-token-service/030.fetch-token.ts new file mode 100644 index 00000000..ff060b6a --- /dev/null +++ b/src/__tests__/metadata-token-service/030.fetch-token.ts @@ -0,0 +1,129 @@ +import axios from 'axios'; +import { FakeTimersFixture } from '../../utils/tests/fake-timers-fixture'; +import { MetadataTokenService, setTestInitializeTimerAdvance } from '../../token-service/metadata-token-service'; +import Mock = jest.Mock; +import { buildTestLogger } from '../../utils/test-logger'; +import { HRInterval } from '../../utils/hr-interval'; +import { + GET_TOKEN_BACKOFF_SLOT_DURATION, + GET_TOKEN_BACKOFF_CEILING, + GET_TOKEN_BACKOFF_UNCERTAIN_RATIO, + Messages, +} from '../../token-service/metadata-token-service.consts'; + +const TTL = 10 * 60 * 60; +const RANDOM = 0.7; + +describe('MetadataTokenService.fetchToken', () => { + const fakeTimersFixture = new FakeTimersFixture(); + + const { + testLogger, + testLoggerFn, + } = buildTestLogger(); + + let metadataTokenService: MetadataTokenService; + + beforeEach(() => { + fakeTimersFixture.setup(); + jest.spyOn(axios, 'get'); + jest.spyOn(Math, 'random'); + (Math.random as Mock).mockReturnValue(RANDOM); + metadataTokenService = new MetadataTokenService({ logger: testLogger }); + testLoggerFn.mockReset(); // clear constructor log + }); + + 
afterEach(async () => { + testLoggerFn.mockReset(); + jest.restoreAllMocks(); + await fakeTimersFixture.dispose(); + }); + + it('successfully fetching token', async () => { + (axios.get as Mock).mockReturnValue({ + status: 200, + statusText: 'ok', + data: { + token_type: 'Bearer', + access_token: '123', + expires_in: TTL, + }, + }); + + // @ts-ignore + expect(await (metadataTokenService as unknown).fetchToken()) + .toBe('123'); + + expect(testLoggerFn.mock.calls) + .toEqual([ + [ + 'trace', + Messages.trace_fetchToken, + ], + [ + 'trace', + Messages.trace_setIamResponse, + ], + [ + 'debug', + Messages.debug_new_token_was_received, + new HRInterval(TTL * 1000), + '', + ], + ]); + }); + + it('error when fetching token, with increasing delay between attempts', async () => { + (axios.get as Mock).mockReturnValue({ + status: 400, + statusText: 'error', + }); + + // @ts-ignore + (metadataTokenService as unknown).tokenExpiresAt = Date.now() + TTL; + // @ts-ignore + (metadataTokenService as unknown).token = '123'; + // @ts-ignore + (metadataTokenService as unknown).tokenStartReportTTLAt = Date.now() + TTL + 24; + + for (let i = 0; i < GET_TOKEN_BACKOFF_CEILING + 2; i++) { + // @ts-ignore + // eslint-disable-next-line no-await-in-loop + await expect((metadataTokenService as unknown).fetchToken()) + .rejects + .toThrow(new Error('failed to fetch token: 400 error')); + + // @ts-ignore + // eslint-disable-next-line no-bitwise + const slotsCount = 1 << Math.min((metadataTokenService as unknown).tokenErrorCount - 1, GET_TOKEN_BACKOFF_CEILING); + const maxDuration = slotsCount * GET_TOKEN_BACKOFF_SLOT_DURATION; + + // @ts-ignore + expect((metadataTokenService as unknown).tokenRefreshAt) + .toBe(Date.now() + maxDuration * (1 - RANDOM * GET_TOKEN_BACKOFF_UNCERTAIN_RATIO)); + + // @ts-ignore + // after error remains the same + expect((metadataTokenService as unknown).token) + .toBe('123'); + // @ts-ignore + expect((metadataTokenService as unknown).tokenExpiresAt) + .toBe(Date.now() + TTL); + // @ts-ignore + expect((metadataTokenService as unknown).tokenStartReportTTLAt) + .toBe(Date.now() + TTL + 24); + + expect(testLoggerFn.mock.calls) + .toEqual([ + [ + 'trace', + Messages.trace_fetchToken, + ], + [ + 'error', + 'failed to fetch token: 400 error', + ]]); + testLoggerFn.mockReset(); + } + }); +}); diff --git a/src/__tests__/metadata-token-service/040.initialize.ts b/src/__tests__/metadata-token-service/040.initialize.ts new file mode 100644 index 00000000..ef0fd7ef --- /dev/null +++ b/src/__tests__/metadata-token-service/040.initialize.ts @@ -0,0 +1,140 @@ +import axios from 'axios'; +import { FakeTimersFixture } from '../../utils/tests/fake-timers-fixture'; +import { MetadataTokenService, setTestInitializeTimerAdvance } from '../../token-service/metadata-token-service'; +import Mock = jest.Mock; +import { buildTestLogger } from '../../utils/test-logger'; +import { HRInterval } from '../../utils/hr-interval'; +import { + INITIALIZE_MAX_ATTEMPTS_OF_GET_TOKEN, + Messages, +} from '../../token-service/metadata-token-service.consts'; + +const TTL = 10 * 60 * 60; +const RANDOM = 0.7; + +describe('MetadataTokenService.initialize', () => { + const fakeTimersFixture = new FakeTimersFixture(); + + setTestInitializeTimerAdvance(async (duration) => { + await fakeTimersFixture.advanceTimer(duration); + }); + + const { + testLogger, + testLoggerFn, + } = buildTestLogger(); + + let metadataTokenService: MetadataTokenService; + + beforeEach(() => { + fakeTimersFixture.setup(); + jest.spyOn(axios, 'get'); + 
jest.spyOn(Math, 'random'); + (Math.random as Mock).mockReturnValue(RANDOM); + metadataTokenService = new MetadataTokenService({ logger: testLogger }); + testLoggerFn.mockReset(); // clear constructor log + }); + + afterEach(async () => { + testLoggerFn.mockReset(); + jest.restoreAllMocks(); + await fakeTimersFixture.dispose(); + }); + + it('token was successfully obtained from the beginning', async () => { + (axios.get as Mock).mockReturnValue({ + status: 200, + statusText: 'ok', + data: { + token_type: 'Bearer', + access_token: '123', + expires_in: TTL, + }, + }); + + // @ts-ignore + await (metadataTokenService as unknown).initialize(); + + // @ts-ignore + expect((metadataTokenService as unknown).token) + .toBe('123'); + }); + + it('the token was not successfully obtained on the first attempt', async () => { + const N = Math.trunc(INITIALIZE_MAX_ATTEMPTS_OF_GET_TOKEN / 2); + + const nextReturn = (function* nextReturn() { + for (let i = 0; i < N; i++) { + yield { + status: 400, + statusText: 'error', + }; + } + + return { + status: 200, + statusText: 'ok', + data: { + token_type: 'Bearer', + access_token: '123', + expires_in: TTL, + }, + }; + }()); + + (axios.get as Mock).mockImplementation(() => nextReturn.next().value); + + // @ts-ignore + await (metadataTokenService as unknown).initialize(); + + expect(testLoggerFn.mock.calls) + .toEqual([ + [ + 'trace', + Messages.trace_initialize, + ], + [ + 'trace', + Messages.trace_fetchToken, + ], + [ + 'error', + 'failed to fetch token: 400 error', + ], + [ + 'trace', + Messages.trace_fetchToken, + ], + [ + 'error', + 'failed to fetch token: 400 error', + ], + [ + 'trace', + Messages.trace_fetchToken, + ], + [ + 'trace', + Messages.trace_setIamResponse, + ], + [ + 'debug', + Messages.debug_new_token_was_received, + new HRInterval(TTL * 1000), + '', + ], + ]); + }); + + it('all attempts failed', async () => { + (axios.get as Mock).mockReturnValue({ + status: 400, + statusText: 'error', + }); + + // @ts-ignore + await expect((metadataTokenService as unknown).initialize()) + .rejects + .toThrow(new Error('failed to fetch token: 400 error')); + }); +}); diff --git a/src/__tests__/metadata-token-service/050.get-token.ts b/src/__tests__/metadata-token-service/050.get-token.ts new file mode 100644 index 00000000..7b1d60e9 --- /dev/null +++ b/src/__tests__/metadata-token-service/050.get-token.ts @@ -0,0 +1,448 @@ +import axios from 'axios'; +import { FakeTimersFixture } from '../../utils/tests/fake-timers-fixture'; +import { MetadataTokenService, setTestInitializeTimerAdvance } from '../../token-service/metadata-token-service'; +import Mock = jest.Mock; +import { buildTestLogger } from '../../utils/test-logger'; +import { HRInterval } from '../../utils/hr-interval'; +import { + INITIALIZE_MAX_ATTEMPTS_OF_GET_TOKEN, + Messages, +} from '../../token-service/metadata-token-service.consts'; + +const TTL = 10 * 60 * 60; + +describe('MetadataTokenService.getToken', () => { + const fakeTimersFixture = new FakeTimersFixture(); + + setTestInitializeTimerAdvance(async (duration) => { + await fakeTimersFixture.advanceTimer(duration); + }); + + const { + testLogger, + testLoggerFn, + } = buildTestLogger(); + + let metadataTokenService: MetadataTokenService; + + beforeEach(() => { + fakeTimersFixture.setup(); + jest.spyOn(axios, 'get'); + jest.spyOn(Math, 'random'); + (Math.random as Mock).mockReturnValue(1); + metadataTokenService = new MetadataTokenService({ logger: testLogger }); + testLoggerFn.mockReset(); // clear constructor log + }); + + afterEach(async () => 
{ + testLoggerFn.mockReset(); + jest.restoreAllMocks(); + await fakeTimersFixture.dispose(); + }); + + it('initial state, successfully fetching token', async () => { + (axios.get as Mock).mockReturnValue({ + status: 200, + statusText: 'ok', + data: { + token_type: 'Bearer', + access_token: '123', + expires_in: TTL, + }, + }); + + expect(await metadataTokenService.getToken()) + .toBe('123'); + + expect(testLoggerFn.mock.calls) + .toEqual([ + [ + 'trace', + Messages.trace_getToken, + ], + [ + 'trace', + Messages.trace__getToken, + ], + [ + 'trace', + Messages.trace_initialize, + ], + [ + 'trace', + Messages.trace_fetchToken, + ], + [ + 'trace', + Messages.trace_setIamResponse, + ], + [ + 'debug', + Messages.debug_new_token_was_received, + new HRInterval(TTL * 1000), + '', + ], + ]); + }); + + it('initial state, error when fetching token', async () => { + (axios.get as Mock).mockReturnValue({ + status: 400, + statusText: 'error', + }); + + await expect(metadataTokenService.getToken()) + .rejects + .toThrow(new Error('failed to fetch token: 400 error')); + + expect(testLoggerFn.mock.calls) + .toEqual((() => { + const res = [ + [ + 'trace', + Messages.trace_getToken, + ], + [ + 'trace', + Messages.trace__getToken, + ], + [ + 'trace', + Messages.trace_initialize, + ], + ]; + + for (let i = 0; i < INITIALIZE_MAX_ATTEMPTS_OF_GET_TOKEN; i++) { + res.push( + [ + 'trace', + Messages.trace_fetchToken, + ], + [ + 'error', + 'failed to fetch token: 400 error', + ], + ); + } + + return res; + })()); + }); + + it('there is a token, but at the next fetchToken() an error is received', async () => { + (axios.get as Mock).mockReturnValue({ + status: 200, + statusText: 'ok', + data: { + token_type: 'Bearer', + access_token: '123', + expires_in: TTL, + }, + }); + + expect(await metadataTokenService.getToken()) + .toBe('123'); + + testLoggerFn.mockReset(); // log starts from here + + (axios.get as Mock).mockReturnValue({ + status: 400, + statusText: 'error', + }); + + // @ts-ignore + await fakeTimersFixture.advanceTimer((metadataTokenService as unknown).tokenRefreshAt - Date.now()); + + expect(await metadataTokenService.getToken()) + .toBe('123'); + + expect(testLoggerFn.mock.calls) + .toEqual([ + [ + 'trace', + Messages.trace_getToken, + ], + [ + 'trace', + Messages.trace__getToken, + ], + [ + 'trace', + Messages.trace_fetchToken, + ], + [ + 'error', + 'failed to fetch token: 400 error', + ]]); + }); + + it('token expired, successfully fetch new token', async () => { + (axios.get as Mock).mockReturnValue({ + status: 200, + statusText: 'ok', + data: { + token_type: 'Bearer', + access_token: '123', + expires_in: TTL, + }, + }); + + expect(await metadataTokenService.getToken()) + .toBe('123'); + + testLoggerFn.mockReset(); // log starts from here + + // @ts-ignore + await fakeTimersFixture.advanceTimer((metadataTokenService as unknown).tokenExpiresAt - Date.now()); + + expect(await metadataTokenService.getToken()) + .toBe('123'); + + expect(testLoggerFn.mock.calls) + .toEqual([ + [ + 'trace', + Messages.trace_getToken, + ], + [ + 'trace', + Messages.trace__getToken, + ], + [ + 'trace', + Messages.trace_initialize, + ], + [ + 'trace', + Messages.trace_fetchToken, + ], + [ + 'trace', + Messages.trace_setIamResponse, + ], + [ + 'debug', + Messages.debug_new_token_was_received, + new HRInterval(TTL * 1000), + '', + ]]); + }); + + it('token expired, error when fetching new token', async () => { + (axios.get as Mock).mockReturnValue({ + status: 200, + statusText: 'ok', + data: { + token_type: 'Bearer', + access_token: 
'123', + expires_in: TTL, + }, + }); + + expect(await metadataTokenService.getToken()) + .toBe('123'); + + testLoggerFn.mockReset(); // log starts from here + + (axios.get as Mock).mockReturnValue({ + status: 400, + statusText: 'error', + }); + + // @ts-ignore + await fakeTimersFixture.advanceTimer((metadataTokenService as unknown).tokenExpiresAt - Date.now()); + + await expect(metadataTokenService.getToken()) + .rejects + .toThrow(new Error('failed to fetch token: 400 error')); + + expect(testLoggerFn.mock.calls) + .toEqual((() => { + const res = [ + [ + 'trace', + Messages.trace_getToken, + ], + [ + 'trace', + Messages.trace__getToken, + ], + [ + 'trace', + Messages.trace_initialize, + ], + ]; + + for (let i = 0; i < INITIALIZE_MAX_ATTEMPTS_OF_GET_TOKEN; i++) { + res.push( + [ + 'trace', + Messages.trace_fetchToken, + ], + [ + 'error', + 'failed to fetch token: 400 error', + ], + ); + } + + return res; + })()); + }); + + it('one _getToken() in a time', async () => { + let getResolve: (data: unknown) => void; + + // eslint-disable-next-line no-promise-executor-return + (axios.get as Mock).mockImplementation(async () => new Promise((resolve) => { + getResolve = resolve; + })); + + const getTokenPromise1 = metadataTokenService.getToken(); + + const getTokenPromise2 = metadataTokenService.getToken(); + + // @ts-ignore + getResolve({ + status: 200, + statusText: 'ok', + data: { + token_type: 'Bearer', + access_token: '123', + expires_in: TTL, + }, + }); + + expect(await getTokenPromise1) + .toBe('123'); + + expect(await getTokenPromise2) + .toBe('123'); + + expect(testLoggerFn.mock.calls) + .toEqual([ + [ + 'trace', + Messages.trace_getToken, + ], + [ + 'trace', + Messages.trace__getToken, + ], + [ + 'trace', + Messages.trace_initialize, + ], + [ + 'trace', + Messages.trace_fetchToken, + ], + [ + 'trace', + Messages.trace_getToken, + ], + [ + 'trace', + Messages.trace_setIamResponse, + ], + [ + 'debug', + Messages.debug_new_token_was_received, + new HRInterval(TTL * 1000), + '', + ]]); + }); + + it('fetchToken() is not invoked again till tokenRefreshAt, even if there was an error', async () => { + (axios.get as Mock).mockReturnValue({ + status: 400, + statusText: 'error', + }); + + // @ts-ignore + // eslint-disable-next-line no-underscore-dangle + await expect(metadataTokenService.getToken()) + .rejects + .toThrow(new Error('failed to fetch token: 400 error')); + + // @ts-ignore + await fakeTimersFixture.advanceTimer((metadataTokenService as unknown).tokenRefreshAt - 20 - Date.now()); + + (axios.get as Mock).mockReturnValue({ + status: 200, + statusText: 'ok', + data: { + token_type: 'Bearer', + access_token: '123', + expires_in: TTL, + }, + }); + + // @ts-ignore + await expect(metadataTokenService.getToken()) + .rejects + .toThrow(new Error('failed to fetch token: 400 error')); + + await fakeTimersFixture.advanceTimer(20); // it's time to update + + // @ts-ignore + expect(await metadataTokenService.getToken()).toBe('123'); + }); + + it('in a while starts to report token TTL left', async () => { + (axios.get as Mock).mockReturnValue({ + status: 200, + statusText: 'ok', + data: { + token_type: 'Bearer', + access_token: '123', + expires_in: TTL, + }, + }); + + // @ts-ignore + expect(await metadataTokenService.getToken()) + .toBe('123'); + + testLoggerFn.mockReset(); + + (axios.get as Mock).mockReturnValue({ + status: 400, + statusText: 'error', + }); + + // @ts-ignore + await fakeTimersFixture.advanceTimer((metadataTokenService as unknown).tokenStartReportTTLAt - Date.now()); + + // @ts-ignore 
+ expect(await metadataTokenService.getToken()) + .toBe('123'); + + // @ts-ignore + expect(testLoggerFn.mock.calls) + .toEqual([ + [ + 'trace', + Messages.trace_getToken, + ], + [ + 'trace', + Messages.trace__getToken, + ], + [ + 'trace', + Messages.trace_fetchToken, + ], + [ + 'error', + 'failed to fetch token: 400 error', + ], + [ + 'warn', + Messages.warn_emaining_token_TTL, + // @ts-ignore + new HRInterval((metadataTokenService as unknown).tokenExpiresAt - Date.now()), + ], + ]); + }); +}); diff --git a/src/__tests__/metadata-token-service/060.update-token-in-background.ts b/src/__tests__/metadata-token-service/060.update-token-in-background.ts new file mode 100644 index 00000000..c3d6e86e --- /dev/null +++ b/src/__tests__/metadata-token-service/060.update-token-in-background.ts @@ -0,0 +1,455 @@ +import axios from 'axios'; +import { FakeTimersFixture } from '../../utils/tests/fake-timers-fixture'; +import { getTestInitPromise, MetadataTokenService, setTestInitializeTimerAdvance } from '../../token-service/metadata-token-service'; +import Mock = jest.Mock; +import { buildTestLogger } from '../../utils/test-logger'; +import { HRInterval } from '../../utils/hr-interval'; +import { + Messages, + DEFAULT_URL, + DEFAULT_OPTIONS, +} from '../../token-service/metadata-token-service.consts'; + +const TTL = 10 * 60 * 60; + +describe('MetadataTokenService.getToken', () => { + const fakeTimersFixture = new FakeTimersFixture(); + + setTestInitializeTimerAdvance(async (duration) => { + await fakeTimersFixture.advanceTimer(duration); + }); + + const { + testLogger, + testLoggerFn, + } = buildTestLogger(); + + beforeEach(() => { + fakeTimersFixture.setup(); + jest.spyOn(axios, 'get'); + jest.spyOn(Math, 'random'); + (Math.random as Mock).mockReturnValue(1); + testLoggerFn.mockReset(); // clear constructor log + }); + + afterEach(async () => { + testLoggerFn.mockReset(); + jest.restoreAllMocks(); + await fakeTimersFixture.dispose(); + }); + + it('the first time the token is requested immediately when the service is created', async () => { + (axios.get as Mock).mockReturnValue({ + status: 200, + statusText: 'ok', + data: { + token_type: 'Bearer', + access_token: '123', + expires_in: TTL, + }, + }); + + const metadataTokenService = new MetadataTokenService({ + logger: testLogger, + doUpdateTokenInBackground: true, + }); + + await getTestInitPromise(); // wait till background tocken will be received + + expect(testLoggerFn.mock.calls) + .toEqual([ + [ + 'debug', + Messages.debug_ctor, + DEFAULT_URL, + true, + DEFAULT_OPTIONS, + ], + [ + 'trace', + Messages.trace_getToken, + ], + [ + 'trace', + Messages.trace__getToken, + ], + [ + 'trace', + Messages.trace_initialize, + ], + [ + 'trace', + Messages.trace_fetchToken, + ], + [ + 'trace', + Messages.trace_setIamResponse, + ], + [ + 'debug', + Messages.debug_new_token_was_received, + new HRInterval(TTL * 1000), + '', + ], + [ + 'trace', + Messages.trace_setTimer, + ], + ]); + + await metadataTokenService.destroy(); + }); + + it('the first time the token request has failed', async () => { + (axios.get as Mock).mockReturnValue({ + status: 400, + statusText: 'error', + }); + + const metadataTokenService = new MetadataTokenService({ + logger: testLogger, + doUpdateTokenInBackground: true, + }); + + await getTestInitPromise(); // wait till background error will be received + + testLoggerFn.mockReset(); // clear log + + await expect(metadataTokenService.getToken()) + .rejects + .toThrow(new Error('failed to fetch token: 400 error')); + + 
expect(testLoggerFn.mock.calls) + .toEqual([ + [ + 'trace', + Messages.trace_getToken, + ], + ]); + + await metadataTokenService.destroy(); + }); + + it('if getToken() is called until an initial response or an error is received, getToken() will wait for a response', async () => { + let getResolve: (data: unknown) => void; + + // eslint-disable-next-line no-promise-executor-return + (axios.get as Mock).mockImplementation(async () => new Promise((resolve) => { + getResolve = resolve; + })); + + const metadataTokenService = new MetadataTokenService({ + logger: testLogger, + doUpdateTokenInBackground: true, + }); + + const getTokenPromise1 = metadataTokenService.getToken(); + + const getTokenPromise2 = metadataTokenService.getToken(); + + // @ts-ignore + getResolve({ + status: 200, + statusText: 'ok', + data: { + token_type: 'Bearer', + access_token: '123', + expires_in: TTL, + }, + }); + + await getTestInitPromise(); // wait till background error will be received + + expect(await getTokenPromise1) + .toBe('123'); + + expect(await getTokenPromise2) + .toBe('123'); + + expect(testLoggerFn.mock.calls) + .toEqual([ + [ + 'debug', + Messages.debug_ctor, + DEFAULT_URL, + true, + DEFAULT_OPTIONS, + ], + [ + 'trace', + Messages.trace_getToken, + ], + [ + 'trace', + Messages.trace__getToken, + ], + [ + 'trace', + Messages.trace_initialize, + ], + [ + 'trace', + Messages.trace_fetchToken, + ], + [ + 'trace', + Messages.trace_getToken, + ], + [ + 'trace', + Messages.trace_getToken, + ], + [ + 'trace', + Messages.trace_setIamResponse, + ], + [ + 'debug', + Messages.debug_new_token_was_received, + new HRInterval(TTL * 1000), + '', + ], + [ + 'trace', + Messages.trace_setTimer, + ], + ]); + }); + + it('the token is regularly requested by timer, even if it not used - ok responses', async () => { + (axios.get as Mock).mockReturnValue({ + status: 200, + statusText: 'ok', + data: { + token_type: 'Bearer', + access_token: '123', + expires_in: TTL, + }, + }); + + const metadataTokenService = new MetadataTokenService({ + logger: testLogger, + doUpdateTokenInBackground: true, + }); + + await getTestInitPromise(); // wait till background error will be received + + testLoggerFn.mockReset(); + + const N = 5; + + for (let i = 0; i < N; i++) { + // @ts-ignore + // eslint-disable-next-line no-await-in-loop + await fakeTimersFixture.advanceTimer((metadataTokenService as unknown).tokenRefreshAt - Date.now()); + } + + expect(testLoggerFn.mock.calls) + .toEqual((() => { + const res: unknown[] = []; + + for (let i = 0; i < N; i++) { + res.push( + [ + 'trace', + Messages.trace__getToken, + ], + [ + 'trace', + Messages.trace_fetchToken, + ], + [ + 'trace', + Messages.trace_setIamResponse, + ], + [ + 'debug', + Messages.debug_new_token_was_received, + new HRInterval(TTL * 1000), + '', + ], + [ + 'trace', + Messages.trace_setTimer, + ], + ); + } + + return res; + })()); + }); + + it('the token is regularly requested by timer, even if it not used - error responses', async () => { + (axios.get as Mock).mockReturnValue({ + status: 200, + statusText: 'ok', + data: { + token_type: 'Bearer', + access_token: '123', + expires_in: TTL, + }, + }); + + const metadataTokenService = new MetadataTokenService({ + logger: testLogger, + doUpdateTokenInBackground: true, + }); + + await getTestInitPromise(); // wait till background error will be received + + testLoggerFn.mockReset(); + + (axios.get as Mock).mockReturnValue({ + status: 400, + statusText: 'error', + }); + + const N = 5; + + for (let i = 0; i < N; i++) { + // @ts-ignore + // 
eslint-disable-next-line no-await-in-loop + await fakeTimersFixture.advanceTimer((metadataTokenService as unknown).tokenRefreshAt - Date.now()); + } + + expect(testLoggerFn.mock.calls) + .toEqual((() => { + const res: unknown[] = []; + + for (let i = 0; i < N; i++) { + res.push( + [ + 'trace', + Messages.trace__getToken, + ], + [ + 'trace', + Messages.trace_fetchToken, + ], + [ + 'error', + 'failed to fetch token: 400 error', + ], + [ + 'trace', + Messages.trace_setTimer, + ], + ); + } + + return res; + })()); + }); + + it('when getToken() is called, no fetchToken() called, even if it\'s about time', async () => { + (axios.get as Mock).mockReturnValue({ + status: 400, + statusText: 'error', + }); + + // intentionally disable timer + // @ts-ignore + jest.spyOn(MetadataTokenService.prototype, 'setTimer'); + // @ts-ignore + ((MetadataTokenService.prototype as unknown).setTimer as Mock).mockReturnValue(); + + const metadataTokenService = new MetadataTokenService({ + logger: testLogger, + doUpdateTokenInBackground: true, + }); + + await getTestInitPromise(); // wait till background error will be received + + testLoggerFn.mockReset(); // clear log + + await expect(metadataTokenService.getToken()) + .rejects + .toThrow(new Error('failed to fetch token: 400 error')); + + // @ts-ignore + fakeTimersFixture.advanceTimer((metadataTokenService as unknown).tokenRefreshAt * 2); + + await expect(metadataTokenService.getToken()) + .rejects + .toThrow(new Error('failed to fetch token: 400 error')); + + expect(testLoggerFn.mock.calls) + .toEqual([ + [ + 'trace', + Messages.trace_getToken, + ], + [ + 'trace', + Messages.trace_getToken, + ], + ]); + }); + + it('dispose stops the timer and getToken() starts to throw an error', async () => { + (axios.get as Mock).mockReturnValue({ + status: 200, + statusText: 'ok', + data: { + token_type: 'Bearer', + access_token: '123', + expires_in: TTL, + }, + }); + + const metadataTokenService = new MetadataTokenService({ + logger: testLogger, + doUpdateTokenInBackground: true, + }); + + await getTestInitPromise(); // wait till background error will be received + + expect(await metadataTokenService.getToken()) + .toBe('123'); + + await expect(metadataTokenService.destroy()) + .resolves + .toBeUndefined(); + + await expect(metadataTokenService.getToken()) + .rejects + .toThrow(new Error(Messages.err_service_is_destroyed)); + + await expect(metadataTokenService.destroy()) + .rejects + .toThrow(new Error(Messages.err_service_is_destroyed)); + }); + + it('dispose() while initialize() is not complete', async () => { + let getResolve: (data: unknown) => void; + + // eslint-disable-next-line no-promise-executor-return + (axios.get as Mock).mockImplementation(async () => new Promise((resolve) => { + getResolve = resolve; + })); + + const metadataTokenService = new MetadataTokenService({ + logger: testLogger, + doUpdateTokenInBackground: true, + }); + + await metadataTokenService.destroy(); + + // @ts-ignore + getResolve({ + status: 200, + statusText: 'ok', + data: { + token_type: 'Bearer', + access_token: '123', + expires_in: TTL, + }, + }); + + await getTestInitPromise(); + + // @ts-ignore + expect((metadataTokenService as unknown).timer) + .toBeUndefined(); + }); +}); diff --git a/src/__tests__/utils/simple-logger.ts b/src/__tests__/utils/simple-logger.ts new file mode 100644 index 00000000..39171739 --- /dev/null +++ b/src/__tests__/utils/simple-logger.ts @@ -0,0 +1,187 @@ +import { SimpleLogger, setMockConsole } from '../../utils/simple-logger'; +import { FakeTimersFixture } 
from '../../utils/tests/fake-timers-fixture'; +import Mock = jest.Mock; + +describe('logger', () => { + let oldLL: string | undefined; + let mockConsole: typeof console | undefined; + + beforeEach(() => { + oldLL = process.env.LOG_LEVEL; + delete process.env.LOG_LEVEL; + setMockConsole(mockConsole = { + error: jest.fn(), + warn: jest.fn(), + info: jest.fn(), + debug: jest.fn(), + trace: jest.fn(), + } as unknown as typeof console); + }); + + afterEach(() => { + setMockConsole(mockConsole = undefined); + if (oldLL !== undefined) { + process.env.LOG_LEVEL = oldLL; + } + }); + + for (const env of [false, true, 'AAA']) { + for (const conf of [ + { + level: undefined, + fatal: true, + error: true, + warn: true, + info: true, + debug: false, + trace: false, + }, + { + level: SimpleLogger.LogLevel.warn, + fatal: true, + error: true, + warn: true, + info: false, + debug: false, + trace: false, + }, + { + level: SimpleLogger.LogLevel.debug, + fatal: true, + error: true, + warn: true, + info: true, + debug: true, + trace: false, + }, + { + level: 'wrong' as SimpleLogger.LogLevel, + fatal: true, + error: true, + warn: true, + info: true, + debug: true, + trace: true, + }]) { + if (conf.level === undefined && env) continue; // makes no sense - sounds like ENV says that level is undefined; + + // eslint-disable-next-line @typescript-eslint/no-loop-func + it(`general: ${env}, ${conf.level}`, () => { + let logger: SimpleLogger.Logger; + + switch (env) { + case false: + logger = conf.level === undefined + ? new SimpleLogger({ showLevel: false, showTimestamp: false }) + : new SimpleLogger({ level: conf.level, showLevel: false, showTimestamp: false }); + break; + case true: + process.env.LOG_LEVEL = conf.level; + logger = new SimpleLogger({ showLevel: false, showTimestamp: false }); + break; + default: + process.env[env] = conf.level; + logger = new SimpleLogger({ envKey: env, showLevel: false, showTimestamp: false }); + delete process.env[env]; + } + + logger.fatal('fatal'); + logger.error('error'); + logger.warn('warn'); + logger.info('info'); + logger.debug('debug'); + logger.trace('trace'); + + // @ts-ignore + expect((mockConsole.error as Mock).mock.calls) + .toHaveLength((conf.fatal ? 1 : 0) + (conf.error ? 1 : 0)); + // @ts-ignore + expect((mockConsole.warn as Mock).mock.calls) + .toHaveLength(conf.warn ? 1 : 0); + // @ts-ignore + expect((mockConsole.info as Mock).mock.calls) + .toHaveLength(conf.info ? 1 : 0); + // @ts-ignore + expect((mockConsole.debug as Mock).mock.calls) + .toHaveLength(conf.debug ? 1 : 0); + // @ts-ignore + expect((mockConsole.trace as Mock).mock.calls) + .toHaveLength(conf.trace ? 
1 : 0); + }); + } + } + + it('different parameters of log method', () => { + const logger = new SimpleLogger({ showLevel: false, showTimestamp: false }); + + const a = { + b: { + a: {}, + }, + }; + + a.b.a = a; // circular object + + logger.info('simple message'); + logger.info('simple message with inline: %s', '123'); + logger.info(a, 'message with object'); + logger.info(a, 'message with object and inline: %d', 21); + logger.info(a); + + // @ts-ignore + expect((mockConsole.info as Mock).mock.calls) + .toEqual([ + ['simple message'], + ['simple message with inline: %s', '123'], + ['%o message with object', a], + ['%o message with object and inline: %d', 21, a], + ['%o', a], + ]); + }); + + for (const prefix of [undefined, 'test']) { + for (const showTimestamp of [false, true]) { + for (const showLevel of [false, true]) { + // eslint-disable-next-line @typescript-eslint/no-loop-func + it(`prefix, mshowTimestamp, showLevel: ${prefix}, ${showTimestamp}, ${showLevel}`, () => { + const fakeTimersFixture = new FakeTimersFixture(); + const logger = new SimpleLogger({ + prefix, + showTimestamp, + showLevel, + }); + + fakeTimersFixture.setup(); + + logger.warn('message'); + logger.warn({}); + logger.warn({}, 'text'); + + const msgPrefix: string[] = []; + + if (showTimestamp) { + msgPrefix.push(new Date().toISOString()); + } + + if (showLevel) { + msgPrefix.push('WARN'); + } + + if (prefix) { + msgPrefix.push(prefix); + } + + // @ts-ignore + expect((mockConsole.warn as Mock).mock.calls) + .toEqual([ + [`${msgPrefix.length === 0 ? '' : `[${msgPrefix.join(' ')}] `}message`], + [`${msgPrefix.length === 0 ? '%o' : `[${msgPrefix.join(' ')}] %o`}`, {}], + [`${msgPrefix.length === 0 ? '%o text' : `[${msgPrefix.join(' ')}] %o text`}`, {}], + ]); + + fakeTimersFixture.dispose(); + }); + } + } + } +}); diff --git a/src/index.ts b/src/index.ts index 0bd23339..97331bce 100644 --- a/src/index.ts +++ b/src/index.ts @@ -3,4 +3,6 @@ export * as cloudApi from './generated/yandex/cloud'; export * from './session'; export * from './utils/operation'; export * from './utils/decode-message'; -export { WrappedServiceClientType } from './types'; +export * from './utils/simple-logger'; +export { WrappedServiceClientType, TokenService } from './types'; +export * from './token-service/metadata-token-service'; diff --git a/src/token-service/metadata-token-service.consts.ts b/src/token-service/metadata-token-service.consts.ts new file mode 100644 index 00000000..2d159564 --- /dev/null +++ b/src/token-service/metadata-token-service.consts.ts @@ -0,0 +1,73 @@ +export const INITIALIZE_MAX_ATTEMPTS_OF_GET_TOKEN = 5; + +export const INITIALIZE_BACKOFF_SLOT_DURATION = 5; // ms + +export const INITIALIZE_BACKOFF_CEILING = 3; // up to 5 * (2 ** 3) = 40 ms + +export const INITIALIZE_BACKOFF_UNCERTAIN_RATIO = 0.5; + +export const GET_TOKEN_BACKOFF_SLOT_DURATION = 1 * 30 * 1000; // every 30 mins + +export const GET_TOKEN_BACKOFF_CEILING = 1; // up to 30 * (2 ** 1) = 60 min + +export const GET_TOKEN_BACKOFF_UNCERTAIN_RATIO = 0.3; + +export const TOKEN_MINIMUM_LIFETIME_MARGIN_MS = 15 * 60 * 1000; + +export const TOKEN_LIFETIME_LEFT_TO_REFRESH_PCT = 90; + +export const TOKEN_LIFETIME_LEFT_TO_REPORT_ERROR_PCT = 80; + +export const ERROR_REPORT_INTERVAL_MS = 1 * 60 * 1000; + +export const enum Messages { + /** + * Inlines: new HRInterval(). + */ + warn_emaining_token_TTL = 'remaining token\'s TTL: %s', + + /** + * Inlines: url, doUpdateTokenInBackground, axiosOptions. 
+ */ + debug_ctor = 'metadata-token.ctor: url: %s, doUpdateTokenInBackground: %s, axiosOptions: %o', + /** + * Inlines: new HRInterval(), "(too small TTL)"?. + */ + debug_new_token_was_received = 'new token. TTL: %s%s', + debug_new_token_was_received_too_small_postfix = ' (too small TTL)', + + trace_getToken = 'metadata-token.getToken', + trace_destroy = 'metadata-token.destroy', + trace__getToken = 'metadata-token._getToken', + trace_initialize = 'metadata-token.initialize', + trace_fetchToken = 'metadata-token.fetchToken', + trace_setIamResponse = 'metadata-token.setIamResponse', + trace_setTimer = 'metadata-token.setTimer', + + /** + * Inlines: http-status, error-message. + */ + err_part_http_error = '%s %s', + /** + * Inlines: new HRInterval(). + */ + err_part_insufficient_lifetime = 'insufficient lifetime: %s', + /** + * Inlines: err_part + */ + err_failed_fetch = 'failed to fetch token: %s', + + err_invalid_iam_token = 'invalid iam token: %o', + + err_service_is_destroyed = 'service is destroyed', +} + +export const DEFAULT_URL = 'http://169.254.169.254/computeMetadata/v1/instance/service-accounts/default/token'; + +export const DEFAULT_OPTIONS = { + headers: { + 'Metadata-Flavor': 'Google', + }, +}; + +export const DEFAULT_LOGGER_PREFIX = 'metadata-auth'; diff --git a/src/token-service/metadata-token-service.md b/src/token-service/metadata-token-service.md new file mode 100644 index 00000000..2b8aa783 --- /dev/null +++ b/src/token-service/metadata-token-service.md @@ -0,0 +1,52 @@ +# class MetadataTokenService + +## General rules for getting a token from the Iam service + +Tokens shall be issued in accordance with the following rules. + +When accessing APIs of other Cloud services (including the database), services should follow the standard rules of +working with tokens recommended by the documentation: + +- A token is issued in the way most appropriate for the service: for example, it can be the metadata service for SVM, Token Agent + for bare-metal hosts, or JWT for services hosted outside the Cloud perimeter; + +- The application does not start (does not start accepting requests) until its system SA token has been successfully issued; + the application does not start if the token issued at the moment of start is valid for less than 15 minutes; + +- The token issued at start time is used for a time equal to at least 10% of the difference between expires_at and the time + the token was issued; + +- Once a token has been in use for the time specified in the previous paragraph, it is subject to renewal: the application starts + a background process that reissues the token of its system SA, while all current requests continue to be made with the + cached token (thus, in case of any problems with token reissue, 90% of the token's lifetime is left to notice and + correct the situation); + +- It is recommended that applications have a monitor of the system SA token's usage time, which should fire if that time + approaches 20% of the difference between expires_at and the time the token was issued. + +## MetadataTokenService.getToken() implementation details + +No matter how many parallel getToken() calls are made, there is always only one request to the Iam service at a time. + +If there is a valid previously received token, it is returned. + +The request for a new token is executed after a period that depends on the TOKEN_LIFETIME_LEFT_TO_REFRESH_PCT constant. 
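To make that schedule concrete, here is a minimal usage sketch. It mirrors the constructor options and the threshold arithmetic asserted by the unit tests in this patch (`020.set-iam-response.ts`); the `ydb-sdk` import path and the `expiresInSec` value are illustrative assumptions, not part of the change itself.

```ts
// Minimal sketch, assuming the class is re-exported from the package root
// (shown here as 'ydb-sdk') and that the token is issued for expiresInSec seconds.
import { MetadataTokenService } from 'ydb-sdk';

async function example() {
    // With doUpdateTokenInBackground the service refreshes the token by timer;
    // getToken() then resolves from the cached value.
    const tokenService = new MetadataTokenService({ doUpdateTokenInBackground: true });

    const token = await tokenService.getToken();
    console.info('IAM token received, length: %d', token.length);

    // Threshold arithmetic as asserted by the tests, for a token with this TTL:
    const expiresInSec = 10 * 60 * 60;
    const ttlMs = expiresInSec * 1000;
    const now = Date.now();
    const expiresAt = now + ttlMs - 15 * 60 * 1000;   // minus TOKEN_MINIMUM_LIFETIME_MARGIN_MS
    const refreshAt = now + ttlMs * (1 - 90 / 100);   // after 10% of the lifetime (TOKEN_LIFETIME_LEFT_TO_REFRESH_PCT)
    const reportAt = now + ttlMs * (1 - 80 / 100);    // after 20% of the lifetime (TOKEN_LIFETIME_LEFT_TO_REPORT_ERROR_PCT)
    console.info('expires %s, refresh %s, start warning %s',
        new Date(expiresAt).toISOString(),
        new Date(refreshAt).toISOString(),
        new Date(reportAt).toISOString());

    await tokenService.destroy();
}

example().catch((err) => console.error(err));
```

These are the same thresholds referred to below: the refresh request is scheduled at the refresh time, and warnings begin at the report time if no new token has arrived by then.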
+ +In case of an error, the repeated request will be executed after a period depending on the constants GET_TOKEN_BACKOFF_..... +and a random number. + +If a token update is not received for a long time, warnings are started to be logged when time to expire is less then TOKEN_LIFETIME_LEFT_TO_REPORT_ERROR_PCT. Error are logged not more often than ERROR_REPORT_INTERVAL_MS. + +The token returned by getToken() must have at least TOKEN_MINIMUM_LIFETIME_MARGIN_MS before it expires. + +If there is no old valid token and a new one cannot be obtained, an error is thrown. + +For mandatory token retrieval several attempts can be made with intervals counted taking into account INITIALIZE_BACKOFF_.... constants and a random number. No more than INITIALIZE_MAX_ATTEMPTS_OF_GET_TOKEN attempts are made in this case. + +## Logging + +By default, errors and the message that errors are completed are written to console.log(). But you can catch them by +implementing custom logging. + + + diff --git a/src/token-service/metadata-token-service.ts b/src/token-service/metadata-token-service.ts index 03a83966..e31bfaaa 100644 --- a/src/token-service/metadata-token-service.ts +++ b/src/token-service/metadata-token-service.ts @@ -1,78 +1,350 @@ import axios, { AxiosRequestConfig } from 'axios'; - +import * as util from 'util'; import { TokenService } from '../types'; +import { SimpleLogger } from '../utils/simple-logger'; + +import { + INITIALIZE_BACKOFF_CEILING, + INITIALIZE_BACKOFF_SLOT_DURATION, + INITIALIZE_BACKOFF_UNCERTAIN_RATIO, + INITIALIZE_MAX_ATTEMPTS_OF_GET_TOKEN, + GET_TOKEN_BACKOFF_SLOT_DURATION, + GET_TOKEN_BACKOFF_CEILING, + GET_TOKEN_BACKOFF_UNCERTAIN_RATIO, + TOKEN_LIFETIME_LEFT_TO_REFRESH_PCT, + TOKEN_LIFETIME_LEFT_TO_REPORT_ERROR_PCT, + TOKEN_MINIMUM_LIFETIME_MARGIN_MS, + Messages, + DEFAULT_URL, + DEFAULT_LOGGER_PREFIX, + DEFAULT_OPTIONS, +} from './metadata-token-service.consts'; +import { HRInterval } from '../utils/hr-interval'; -type Options = AxiosRequestConfig; +/** + * Since in the metadata token service the initialize() method contains a loop of retries to get the token, so + * to keep this loop from hanging on the timer while unit tests, we need to advance the timer between retries inside initialize(). + */ +let testInitializeTimerAdvance: (duration: number) => void | undefined; -const DEFAULT_URL = 'http://169.254.169.254/computeMetadata/v1/instance/service-accounts/default/token'; -const DEFAULT_OPTIONS: Options = { - headers: { - 'Metadata-Flavor': 'Google', - }, +/** + * **Only for unit tests purposes**. + */ +export const setTestInitializeTimerAdvance = (timerAdvance: (duration: number) => void) => { + testInitializeTimerAdvance = timerAdvance; }; +let testInitPromise: Promise; + +/** + * **Only for unit tests purposes**. + */ +export const getTestInitPromise = (): Promise | undefined => testInitPromise; + +// eslint-disable-next-line import/export export class MetadataTokenService implements TokenService { private readonly url: string; - private readonly opts: Options; + private readonly axiosOptions: AxiosRequestConfig; + private readonly doUpdateTokenInBackground; + private readonly logger; private token?: string; + /** + * The time after which the token cannot be used. This is the token expiration time minus the minimum time that + * must be available (TOKEN_MINIMUM_LIFETIME_MARGIN_MS). + */ + private tokenExpiresAt = 0; + /** + * Time of scheduled token renewal. Counted as a percentage of the initial time for which the token + * was issued (TOKEN_LIFETIME_LEFT_TO_REFRESH_PCT). 
+ */ + private tokenRefreshAt = 0; + /** + * Time after which to issue warning messages about how much time the available token has left. Counted as + * a percentage of the time for which the token was issued (TOKEN_LIFETIME_LEFT_TO_REPORT_ERROR_PCT). + */ + private tokenStartReportTTLAt = 0; + /** + * Error that is returned when retrying getToken(), if the last fetchToken() returned an error and + * the time of the retry has not yet arrived. + */ + private tokenLastError?: Error; + /** + * Counter of repeated errors received in fetchToken() to make repeated requests with less frequency. + */ + private tokenErrorCount = 0; + + private currentGetTokenPromise?: Promise; + private timer?: NodeJS.Timeout; + private destroyed = false; + + constructor(options?: MetadataTokenService.Options); + + constructor(url: string, options?: MetadataTokenService.Options); + + /* istanbul ignore next */ + constructor(urlOrOptions: string | MetadataTokenService.Options = DEFAULT_URL, options: MetadataTokenService.Options = {}) { + if (typeof urlOrOptions === 'object') { + this.url = DEFAULT_URL; + // eslint-disable-next-line no-param-reassign + options = urlOrOptions; + } else { + this.url = urlOrOptions; + } + + const { + logger, + doUpdateTokenInBackground, + ...opts + } = options; - constructor(url: string = DEFAULT_URL, options: Options = DEFAULT_OPTIONS) { - this.url = url; - this.opts = options; + /* istanbul ignore next */ + this.logger = logger ?? new SimpleLogger({ prefix: DEFAULT_LOGGER_PREFIX }); + + this.doUpdateTokenInBackground = doUpdateTokenInBackground ?? false; + + this.axiosOptions = { ...DEFAULT_OPTIONS, ...opts }; + + this.logger.debug(Messages.debug_ctor, this.url, this.doUpdateTokenInBackground, this.axiosOptions); + + if (this.doUpdateTokenInBackground) { + testInitPromise = this.getToken() + .catch(() => {}); // intentionally without await + } } async getToken(): Promise { - if (!this.token) { - await this.initialize(); + this.logger.trace(Messages.trace_getToken); - if (!this.token) { - throw new Error('Token is empty after MetadataTokenService.initialize'); - } + if (this.destroyed) { + throw new Error(Messages.err_service_is_destroyed); + } - return this.token; + if (this.doUpdateTokenInBackground && (this.tokenLastError || this.token)) { + if (this.tokenLastError) throw this.tokenLastError; + + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + return this.token!; } - return this.token; + if (!this.currentGetTokenPromise) { + this.currentGetTokenPromise = this._getToken() + .finally(() => { + delete this.currentGetTokenPromise; + }); + } + + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + return this.currentGetTokenPromise!; } - private async fetchToken(): Promise { - const res = await axios.get<{ access_token: string }>(this.url, this.opts); + /** + * It is recommended to call this method at the beginning of serverless function with context.token parameter. + * This will reduce serverless function runtime avoiding additional request to the Iam service. 
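
For example, a hypothetical serverless handler could look like the sketch below (not part of this patch; the shape of the invocation context's token is assumed to match MetadataTokenService.IamToken):

```ts
const tokenService = new MetadataTokenService();

export const handler = async (event: unknown, context: { token?: MetadataTokenService.IamToken }) => {
  // Reuse the token already issued to the function's service account instead of
  // making an extra request to the metadata service.
  if (context.token) tokenService.setIamToken(context.token);

  const iamToken = await tokenService.getToken();
  // ... call cloud APIs with iamToken ...
};
```
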
+ */ + setIamToken(iamResponse: MetadataTokenService.IamToken) { + this.logger.trace(Messages.trace_setIamResponse); - if (res.status !== 200) { - throw new Error(`failed to fetch token from metadata service: ${res.status} ${res.statusText}`); + if (!( + typeof (iamResponse as unknown) === 'object' + && (iamResponse as unknown) !== null + && iamResponse.token_type === 'Bearer' + && typeof (iamResponse.expires_in as unknown) === 'number' + && iamResponse.expires_in > 0 + && typeof (iamResponse.access_token as unknown) === 'string' + && iamResponse.access_token !== '' + )) { + throw new Error(util.format(Messages.err_invalid_iam_token, iamResponse)); } - return res.data.access_token; + const TTL = iamResponse.expires_in * 1000 - TOKEN_MINIMUM_LIFETIME_MARGIN_MS; + + // even in non-debug mode output messages that we've got a new token, if there were errors before that + this.logger[this.tokenLastError ? 'info' : 'debug']( + Messages.debug_new_token_was_received, + new HRInterval(iamResponse.expires_in * 1000), + TTL <= 0 ? Messages.debug_new_token_was_received_too_small_postfix : '', + ); + + if (TTL <= 0) { + throw new Error(util.format( + Messages.err_part_insufficient_lifetime, + new HRInterval(iamResponse.expires_in * 1000), + )); + } + + this.token = iamResponse.access_token; + this.tokenExpiresAt = Date.now() + TTL; + this.tokenRefreshAt = Date.now() + iamResponse.expires_in * 1000 * (1 - TOKEN_LIFETIME_LEFT_TO_REFRESH_PCT / 100); + this.tokenStartReportTTLAt = Date.now() + iamResponse.expires_in * 1000 * (1 - TOKEN_LIFETIME_LEFT_TO_REPORT_ERROR_PCT / 100); + + delete this.tokenLastError; + this.tokenErrorCount = 0; } - private async initialize() { - if (this.token) { - return; + async destroy() { + this.logger.trace(Messages.trace_destroy); + + if (this.destroyed) { + throw new Error(Messages.err_service_is_destroyed); + } + this.destroyed = true; + + if (this.timer) { + clearTimeout(this.timer); + delete this.timer; + } + } + + private async _getToken(): Promise { + this.logger.trace(Messages.trace__getToken); + + try { + if (this.tokenLastError && Date.now() < this.tokenRefreshAt) { + throw this.tokenLastError; + } + + try { + if (!this.token || Date.now() >= this.tokenExpiresAt) { + await this.initialize(); + } else if (Date.now() >= this.tokenRefreshAt) { + try { + this.token = await this.fetchToken(); + } catch { + // keep using existing token + if (Date.now() >= this.tokenStartReportTTLAt) { + this.logger.warn( + Messages.warn_emaining_token_TTL, + new HRInterval(this.tokenExpiresAt - Date.now()), + ); + } + } + } + } catch (error) { + this.tokenLastError = error as Error; + + throw error; + } + } finally { + if (this.doUpdateTokenInBackground && !this.destroyed) { + this.setTimer(); + } } - let lastError = null; + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + return this.token!; + } + + private async initialize() { + this.logger.trace(Messages.trace_initialize); + + delete this.token; - for (let i = 0; i < 5; i++) { + for (let i = 1; ; i++) { try { // eslint-disable-next-line no-await-in-loop this.token = await this.fetchToken(); + break; } catch (error) { - lastError = error; + if (i >= INITIALIZE_MAX_ATTEMPTS_OF_GET_TOKEN) { + throw error; + } } + + // Repeated attempts to get token in case of an error are performed at increasing intervals and adding + // a random component - to statistically distribute the load on the IAM. 
+ + // eslint-disable-next-line no-bitwise + const slotsCount = 1 << Math.min(i, INITIALIZE_BACKOFF_CEILING); + const maxDuration = slotsCount * INITIALIZE_BACKOFF_SLOT_DURATION; + const duration = Math.trunc(maxDuration * (1 - Math.random() * INITIALIZE_BACKOFF_UNCERTAIN_RATIO)); + + // eslint-disable-next-line no-await-in-loop,@typescript-eslint/no-loop-func,no-async-promise-executor + await new Promise(async (resolve, reject) => { + try { + setTimeout(resolve, duration); + + // In tests, promote the timer immediately + if (testInitializeTimerAdvance) await testInitializeTimerAdvance(duration); + } catch (error) { + /* istanbul ignore next */ + reject(error); + } + }); } - if (!this.token) { - throw new Error( - // eslint-disable-next-line @typescript-eslint/restrict-template-expressions - `failed to fetch token from metadata service: ${lastError}`, - ); + } + + private async fetchToken(): Promise { + this.logger.trace(Messages.trace_fetchToken); + + let token: string; + + // @ts-ignore + try { + // eslint-disable-next-line @typescript-eslint/ban-types + const res = await axios.get(this.url, this.axiosOptions); + + if (res.status !== 200) { + throw new Error(util.format(Messages.err_part_http_error, res.status, res.statusText)); + } + + this.setIamToken(res.data); + + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + return this.token!; + } catch (error_) { + const error = error_ as Error; + + // eslint-disable-next-line no-bitwise + const slotsCount = 1 << Math.min(this.tokenErrorCount++, GET_TOKEN_BACKOFF_CEILING); + const maxDuration = slotsCount * GET_TOKEN_BACKOFF_SLOT_DURATION; + + this.tokenRefreshAt = Date.now() + maxDuration * (1 - Math.random() * GET_TOKEN_BACKOFF_UNCERTAIN_RATIO); + + error.message = util.format(Messages.err_failed_fetch, error.message); + + this.logger.error(error.message); + + throw error; + } + } + + private setTimer() { + this.logger.trace(Messages.trace_setTimer); + + if (this.timer) { + clearTimeout(this.timer); } - setTimeout(async () => { + + this.timer = setTimeout(async () => { try { - this.token = await this.fetchToken(); + this.token = await this._getToken(); } catch { - // TBD + // nothing - keep using old token or an old error } - }, 30_000); + }, this.tokenRefreshAt - Date.now()); } } + +/* istanbul ignore next */ +// eslint-disable-next-line @typescript-eslint/no-namespace,import/export +export namespace MetadataTokenService { + export type Options = AxiosRequestConfig & { + /** + * Any logger that have methods required by SimpleLogger.Logger. Default is new instance of SimpleLogger. + */ + logger?: SimpleLogger.Logger, + + /** + * The default is false and the token is always checked that it has not expired each time the getToken() + * method is called. If the token is out of date, a new token is requested. In this mode, the update logic works well + * in both standard nodejs and yandex cloud serverless modes. However, in standard nodejs, you can improve the process + * of token exposure by enabling timed updates in the background. In serverless mode, timer updates are likely to be problematic, + * because when the serverless function is not called, the timer events do not work. That's why you should turn it to true + * only when you are **absolutely sure** that the code will not be used in serverless mode. + */ + doUpdateTokenInBackground?: boolean, + }; + + export type IamToken = { token_type: 'Bearer', access_token: string, /** in seconds! 
*/ expires_in: number }; +} diff --git a/src/types.ts b/src/types.ts index 18bda45f..eda5fbee 100644 --- a/src/types.ts +++ b/src/types.ts @@ -11,6 +11,7 @@ import { RetryOptions } from './middleware/retry'; export interface TokenService { getToken: () => Promise; + destroy?: () => void; } export interface GeneratedServiceClientCtor { diff --git a/src/utils/hr-interval.ts b/src/utils/hr-interval.ts new file mode 100644 index 00000000..cef8ac67 --- /dev/null +++ b/src/utils/hr-interval.ts @@ -0,0 +1,20 @@ +/** + * Formats time interval in human-readable form in toString(). + */ +export class HRInterval { + constructor(readonly period: number) {} + toString() { + let remaining = Math.trunc(this.period / 1000); + const hours = Math.trunc(remaining / 60 / 60); + + remaining -= hours * 60 * 60; + const minutes = Math.trunc(remaining / 60); + + const seconds = remaining - minutes * 60; + + return `${hours.toString() + .padStart(2, '0')}:${minutes.toString() + .padStart(2, '0')}:${seconds.toString() + .padStart(2, '0')}`; + } +} diff --git a/src/utils/logger.ts b/src/utils/logger.ts deleted file mode 100644 index 60a4cf2c..00000000 --- a/src/utils/logger.ts +++ /dev/null @@ -1,5 +0,0 @@ -import log4js from 'log4js'; - -export const logger = log4js.getLogger(); - -logger.level = process.env.LOG_LEVEL || 'warn'; diff --git a/src/utils/simple-logger.ts b/src/utils/simple-logger.ts new file mode 100644 index 00000000..e0242aff --- /dev/null +++ b/src/utils/simple-logger.ts @@ -0,0 +1,162 @@ +const DEFAULT_ENV_KEY = 'LOG_LEVEL'; + +const DEFAULT_LEVEL = 'info'; + +const silentLogFn = () => {}; + +const simpleLogFnBuilder = (level: SimpleLogger.LogLevel): SimpleLogger.LogFn => { + const LEVEL = level.toUpperCase(); + + // eslint-disable-next-line @typescript-eslint/no-use-before-define + if (level === SimpleLogger.LogLevel.fatal) { + // eslint-disable-next-line @typescript-eslint/no-use-before-define,no-param-reassign + level = SimpleLogger.LogLevel.error; + } + + return function log(this: SimpleLogger, objOrMsg: string | unknown, ...args: unknown[]) { + const prefix: string[] = []; + + if (this.showTimestamp) { + prefix.push(new Date().toISOString()); + } + + if (this.showLevel) { + prefix.push(LEVEL); + } + + if (this.prefix) { + prefix.push(this.prefix); + } + + const prefixStr = prefix.length === 0 ? '' : `[${prefix.join(' ')}] `; + + if (typeof objOrMsg === 'object') { + if (typeof args[0] !== 'string') { + // @ts-ignore + // eslint-disable-next-line @typescript-eslint/no-use-before-define + consoleOrMock[level](prefix.length > 0 ? `${prefixStr}%o` : '%o', objOrMsg); + } else { + // @ts-ignore + // eslint-disable-next-line @typescript-eslint/no-use-before-define + consoleOrMock[level](`${prefixStr}%o ${args[0]}`, ...args.splice(1), objOrMsg); + } + } else { + // @ts-ignore + // eslint-disable-next-line @typescript-eslint/no-use-before-define + consoleOrMock[level](`${prefixStr}${objOrMsg}`, ...args); + } + }; +}; + +/** + * The simplest logger class, with a minimal set of logging methods and the most simple output to the console. 
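
A short usage sketch for the logger defined here (illustrative only; the outputs in comments assume the default showTimestamp/showLevel behaviour, and the LOG_LEVEL environment variable would override the `level` option). It also uses the HRInterval helper added above.

```ts
const logger = new SimpleLogger({ prefix: 'metadata-auth', level: SimpleLogger.LogLevel.debug });

// -> "[<ISO timestamp> INFO metadata-auth] token refreshed, TTL: 11:00:00"
logger.info('token refreshed, TTL: %s', new HRInterval(11 * 60 * 60 * 1000));

// Object arguments are rendered through a %o placeholder:
// -> console.debug('[<ISO timestamp> DEBUG metadata-auth] %o retrying fetch', { attempt: 2 })
logger.debug({ attempt: 2 }, 'retrying fetch');
```
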
+ */ +// eslint-disable-next-line import/export +export class SimpleLogger implements SimpleLogger.Logger { + fatal: SimpleLogger.LogFn = silentLogFn; + error: SimpleLogger.LogFn = silentLogFn; + warn: SimpleLogger.LogFn = silentLogFn; + info: SimpleLogger.LogFn = silentLogFn; + debug: SimpleLogger.LogFn = silentLogFn; + trace: SimpleLogger.LogFn = silentLogFn; + + readonly prefix?: string; + + readonly showTimestamp: boolean; + readonly showLevel: boolean; + + constructor(options: { + /** + * Level down to which to log messages. Default is *info*. + */ + level?: SimpleLogger.LogLevel, + /** + * Prefix that gets added to a message, default undefined + */ + prefix?: string, + /** + * Whether to add the date and time to the message. Default is true. + */ + showTimestamp?: boolean, + /** + * Whether to add the message level. Default is true. + */ + showLevel?: boolean, + /** + * Environment variable with logging level, which if specified contains the level of + * logging - *error*, *warn*, *info*, *debug*, *trace*. If not specified, the value of + * level parameter is used. If a non-existing level value is specified, all levels are logged. + */ + envKey?: string, + } = {}) { + let { + level, + // eslint-disable-next-line prefer-const + prefix, + // eslint-disable-next-line prefer-const + showTimestamp, + // eslint-disable-next-line prefer-const + showLevel, + } = options; + + if (prefix) this.prefix = prefix; + this.showTimestamp = showTimestamp ?? true; + this.showLevel = showLevel ?? true; + + const envKey = options.envKey ?? DEFAULT_ENV_KEY; + const envLevel = process.env[envKey]; + + // @ts-ignore + level = envLevel !== undefined ? SimpleLogger.LogLevel[envLevel] : level ?? SimpleLogger.LogLevel[DEFAULT_LEVEL]; + + for (const lvl of Object.values(SimpleLogger.LogLevel)) { + // @ts-ignore + this[lvl] = simpleLogFnBuilder(lvl); + if (lvl === level) break; + } + } +} + +/* istanbul ignore next */ +// eslint-disable-next-line @typescript-eslint/no-namespace,import/export +export namespace SimpleLogger { + export interface LogFn { + (obj: unknown, msg?: string, ...args: unknown[]): void; + + (msg: string, ...args: unknown[]): void; + } + + /** + * The simplest interface, containing only the necessary methods used in the project. + * Therefore, *fatal* and *trace* methods are omitted. + */ + export interface Logger { + fatal: LogFn, + error: LogFn, + warn: LogFn, + info: LogFn, + debug: LogFn, + trace: LogFn, + } + + export enum LogLevel { + fatal = 'fatal', + error = 'error', + warn = 'warn', + info = 'info', + debug = 'debug', + trace = 'trace', + } +} + +/** + * For unit tests purposes. + */ +let consoleOrMock = console; + +/** + * **Only for unit tests purposes**. 
+ */ +export const setMockConsole = (mockConsole: Console = console) => { + consoleOrMock = mockConsole; +}; diff --git a/src/utils/test-logger.ts b/src/utils/test-logger.ts new file mode 100644 index 00000000..d1fff4d4 --- /dev/null +++ b/src/utils/test-logger.ts @@ -0,0 +1,15 @@ +import { SimpleLogger } from './simple-logger'; + +export const buildTestLogger = () => { + const testLoggerFn = jest.fn(); + const testLogger = { + fatal: testLoggerFn.bind(undefined, 'fatal') as SimpleLogger.LogFn, + error: testLoggerFn.bind(undefined, 'error') as SimpleLogger.LogFn, + warn: testLoggerFn.bind(undefined, 'warn') as SimpleLogger.LogFn, + info: testLoggerFn.bind(undefined, 'info') as SimpleLogger.LogFn, + debug: testLoggerFn.bind(undefined, 'debug') as SimpleLogger.LogFn, + trace: testLoggerFn.bind(undefined, 'trace') as SimpleLogger.LogFn, + }; + + return { testLogger, testLoggerFn }; +}; diff --git a/src/utils/tests/fake-timers-fixture.ts b/src/utils/tests/fake-timers-fixture.ts new file mode 100644 index 00000000..270ee6ab --- /dev/null +++ b/src/utils/tests/fake-timers-fixture.ts @@ -0,0 +1,37 @@ +/** + * Guarantees that once the time has arrived, all functions called from setTimeout() will be finished by the time + * await fakeTimersFixture.advanceTimer(...) ends. It is important if an async function is passed to setTimeout(). + */ +export class FakeTimersFixture { + setup() { + jest.useFakeTimers(); + // @ts-ignore + this.prevSetTimeout = global.setTimeout; + + // adds to this.timesouts array the handlers for which the time has come. so whould be possible to make sure that all handlers, + // including asynchronous handlers, will be completed - see below. + // @ts-ignore + global.setTimeout = (handler: (...any) => void, timeout?: number, ...args: unknown[]) => this.prevSetTimeout?.(() => { + this.timeouts.push(handler(...args)); // call the handler only when the fake timer comes to necessary point if time + }, timeout); + } + async advanceTimer(msToRun: number) { + jest.advanceTimersByTime(msToRun); + + // this additional logic ensures that by the end of advanceTimer(), all handlers including asynchronous + // handlers on timers will be completed + // eslint-disable-next-line @typescript-eslint/ban-types + await Promise.all(this.timeouts); + this.timeouts.length = 0; + } + + dispose() { + // @ts-ignore + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + global.setTimeout = this.prevSetTimeout!; + jest.useRealTimers(); + } + + private prevSetTimeout?: (handler: (...args: unknown[]) => void, timeout?: number, ...args: unknown[]) => void; + private timeouts: unknown[] = []; +}
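
A hypothetical Jest test sketch showing how the two test helpers above are meant to be combined (import paths and the test itself are illustrative, not part of the patch):

```ts
import { FakeTimersFixture } from './tests/fake-timers-fixture';
import { buildTestLogger } from './test-logger';

describe('timer-driven logging', () => {
  const fixture = new FakeTimersFixture();

  beforeEach(() => fixture.setup());
  afterEach(() => fixture.dispose());

  it('waits for async timer handlers before asserting', async () => {
    const { testLogger, testLoggerFn } = buildTestLogger();

    // An async handler: FakeTimersFixture guarantees it has settled once advanceTimer() resolves.
    setTimeout(async () => testLogger.warn('refresh failed'), 1_000);

    await fixture.advanceTimer(1_000);

    expect(testLoggerFn).toHaveBeenCalledWith('warn', 'refresh failed');
  });
});
```
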