diff --git a/e2e/package.json b/e2e/package.json index 3a75137498d..aaaa86a4403 100644 --- a/e2e/package.json +++ b/e2e/package.json @@ -38,7 +38,7 @@ "tsify": "^3.0.4", "tslint": "^6.1.3", "tslint-no-circular-imports": "~0.7.0", - "typescript": "3.5.3" + "typescript": "4.4.2" }, "scripts": { "build": "tsc", diff --git a/e2e/yarn.lock b/e2e/yarn.lock index 0c7395f2006..6aef079b7dd 100644 --- a/e2e/yarn.lock +++ b/e2e/yarn.lock @@ -1048,10 +1048,15 @@ "@types/node" "*" form-data "^3.0.0" -"@types/node@*", "@types/node@>=10.0.0": - version "17.0.23" - resolved "https://registry.yarnpkg.com/@types/node/-/node-17.0.23.tgz#3b41a6e643589ac6442bdbd7a4a3ded62f33f7da" - integrity sha512-UxDxWn7dl97rKVeVS61vErvw086aCYhDLyvRQZ5Rk65rZKepaFdm53GeqXaKBuOhED4e9uWq34IC3TdSdJJ2Gw== +"@types/node@*": + version "17.0.19" + resolved "https://registry.yarnpkg.com/@types/node/-/node-17.0.19.tgz#726171367f404bfbe8512ba608a09ebad810c7e6" + integrity sha512-PfeQhvcMR4cPFVuYfBN4ifG7p9c+Dlh3yUZR6k+5yQK7wX3gDgVxBly4/WkBRs9x4dmcy1TVl08SY67wwtEvmA== + +"@types/node@>=10.0.0": + version "14.14.36" + resolved "https://registry.yarnpkg.com/@types/node/-/node-14.14.36.tgz#5637905dbb15c30a33a3c65b9ef7c20e3c85ebad" + integrity sha512-kjivUwDJfIjngzbhooRnOLhGYz6oRFi+L+EpMjxroDYXwDw9lHrJJ43E+dJ6KAd3V3WxWAJ/qZE9XKYHhjPOFQ== "@types/offscreencanvas@~2019.3.0": version "2019.3.0" @@ -4079,10 +4084,10 @@ typed-function@^2.0.0: resolved "https://registry.yarnpkg.com/typed-function/-/typed-function-2.1.0.tgz#ded6f8a442ba8749ff3fe75bc41419c8d46ccc3f" integrity sha512-bctQIOqx2iVbWGDGPWwIm18QScpu2XRmkC19D8rQGFsjKSgteq/o1hTZvIG/wuDq8fanpBDrLkLq+aEN/6y5XQ== -typescript@3.5.3: - version "3.5.3" - resolved "https://registry.yarnpkg.com/typescript/-/typescript-3.5.3.tgz#c830f657f93f1ea846819e929092f5fe5983e977" - integrity sha512-ACzBtm/PhXBDId6a6sDJfroT2pOWt/oOnk4/dElG5G33ZL776N3Y6/6bKZJBFpd+b05F3Ct9qDjMeJmRWtE2/g== +typescript@4.4.2: + version "4.4.2" + resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.4.2.tgz#6d618640d430e3569a1dfb44f7d7e600ced3ee86" + integrity sha512-gzP+t5W4hdy4c+68bfcv0t400HVJMMd2+H9B7gae1nQlBzCqvrXX+6GL/b3GAgyTH966pzrZ70/fRjwAtZksSQ== ua-parser-js@^0.7.30: version "0.7.31" diff --git a/link-package/yarn.lock b/link-package/yarn.lock index 08aa01eb79b..c2faf18259a 100644 --- a/link-package/yarn.lock +++ b/link-package/yarn.lock @@ -5,6 +5,9 @@ "@tensorflow/tfjs-backend-cpu@link:../dist/bin/tfjs-backend-webgl/link-package/node_modules/@tensorflow/tfjs-backend-cpu": version "0.0.0" +"@tensorflow/tfjs-backend-cpu@link:../dist/bin/tfjs-backend-webgpu/link-package-core/node_modules/@tensorflow/tfjs-backend-cpu": + version "0.0.0" + "@tensorflow/tfjs-backend-cpu@link:../link-package-core/node_modules/@tensorflow/tfjs-backend-cpu": version "0.0.0" uid "" @@ -12,13 +15,18 @@ "@tensorflow/tfjs-backend-webgl@file:../dist/bin/tfjs-backend-webgl/tfjs-backend-webgl_pkg": version "0.0.0" dependencies: - "@tensorflow/tfjs-backend-cpu" "link:../../../.cache/yarn/v6/npm-@tensorflow-tfjs-backend-webgl-0.0.0-10003ece-7ef2-4086-b2e3-b4d246691b03-1647374227759/node_modules/@tensorflow/link-package/node_modules/@tensorflow/tfjs-backend-cpu" + "@tensorflow/tfjs-backend-cpu" "link:../../../.cache/yarn/v6/npm-@tensorflow-tfjs-backend-webgl-0.0.0-b901bcae-753e-4bf2-becd-93228d006c89-1650661711500/node_modules/@tensorflow/link-package/node_modules/@tensorflow/tfjs-backend-cpu" "@types/offscreencanvas" "~2019.3.0" "@types/seedrandom" "2.4.27" "@types/webgl-ext" "0.0.30" "@types/webgl2" "0.0.6" seedrandom "2.4.3" 
+"@tensorflow/tfjs-backend-webgpu@file:../dist/bin/tfjs-backend-webgpu/tfjs-backend-webgpu_pkg": + version "0.0.1-alpha.4" + dependencies: + "@tensorflow/tfjs-backend-cpu" "link:../../../.cache/yarn/v6/npm-@tensorflow-tfjs-backend-webgpu-0.0.1-alpha.4-223b403f-04fa-4af6-953e-433de81cef9e-1650661711502/node_modules/@tensorflow/link-package-core/node_modules/@tensorflow/tfjs-backend-cpu" + "@tensorflow/tfjs-converter@file:../dist/bin/tfjs-converter/tfjs-converter_pkg": version "0.0.0" @@ -45,17 +53,17 @@ integrity sha512-5tXH6Bx/kNGd3MgffdmP4dy2Z+G4eaXw0SE81Tq3BNadtnMR5/ySMzX4SLEzHJzSmPNn4HIdpQsBvXMUykr58w== "@types/node-fetch@^2.1.2": - version "2.6.1" - resolved "https://registry.yarnpkg.com/@types/node-fetch/-/node-fetch-2.6.1.tgz#8f127c50481db65886800ef496f20bbf15518975" - integrity sha512-oMqjURCaxoSIsHSr1E47QHzbmzNR5rK8McHuNb11BOM9cHcIK3Avy0s/b2JlXHoQGTYS3NsvWzV1M0iK7l0wbA== + version "2.5.12" + resolved "https://registry.yarnpkg.com/@types/node-fetch/-/node-fetch-2.5.12.tgz#8a6f779b1d4e60b7a57fb6fd48d84fb545b9cc66" + integrity sha512-MKgC4dlq4kKNa/mYrwpKfzQMB5X3ee5U6fSprkKpToBqBmX4nFZL9cW5jl6sWn+xpRJ7ypWh2yyqqr8UUCstSw== dependencies: "@types/node" "*" form-data "^3.0.0" "@types/node@*": - version "17.0.21" - resolved "https://registry.yarnpkg.com/@types/node/-/node-17.0.21.tgz#864b987c0c68d07b4345845c3e63b75edd143644" - integrity sha512-DBZCJbhII3r90XbQxI8Y9IjjiiOGlZ0Hr32omXIZvwwZ7p4DMMXGrKXVyPfuoBOri9XNtL0UK69jYIBIsRX3QQ== + version "16.11.6" + resolved "https://registry.yarnpkg.com/@types/node/-/node-16.11.6.tgz#6bef7a2a0ad684cf6e90fcfe31cecabd9ce0a3ae" + integrity sha512-ua7PgUoeQFjmWPcoo9khiPum3Pd60k4/2ZGXt18sm2Slk0W0xZTqt5Y0Ny1NyBiN1EVQ/+FaF9NcY4Qe6rwk5w== "@types/offscreencanvas@~2019.3.0": version "2019.3.0" @@ -156,17 +164,17 @@ long@4.0.0: resolved "https://registry.yarnpkg.com/long/-/long-4.0.0.tgz#9a7b71cfb7d361a194ea555241c92f7468d5bf28" integrity sha512-XsP+KhQif4bjX1kbuSiySJFNAehNxgLb6hPRGJ9QsUr8ajHkuXGdrHmFUTUUXhDwVX2R5bY4JNZEwbUiMhV+MA== -mime-db@1.52.0: - version "1.52.0" - resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.52.0.tgz#bbabcdc02859f4987301c856e3387ce5ec43bf70" - integrity sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg== +mime-db@1.50.0: + version "1.50.0" + resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.50.0.tgz#abd4ac94e98d3c0e185016c67ab45d5fde40c11f" + integrity sha512-9tMZCDlYHqeERXEHO9f/hKfNXhre5dK2eE/krIvUjZbS2KPcqGDfNShIWS1uW9XOTKQKqK6qbeOci18rbfW77A== mime-types@^2.1.12: - version "2.1.35" - resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.35.tgz#381a871b62a734450660ae3deee44813f70d959a" - integrity sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw== + version "2.1.33" + resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.33.tgz#1fa12a904472fafd068e48d9e8401f74d3f70edb" + integrity sha512-plLElXp7pRDd0bNZHw+nMd52vRYjLwQjygaNg7ddJ2uJtTlmnTCjWuPKxVu6//AdaRuME84SvLW91sIkBqGT0g== dependencies: - mime-db "1.52.0" + mime-db "1.50.0" minimatch@^3.0.4: version "3.0.4" diff --git a/package.json b/package.json index 6bf6edfaa6a..07e4e255449 100644 --- a/package.json +++ b/package.json @@ -56,9 +56,10 @@ "terser": "^5.7.0", "ts-morph": "^11.0.3", "ts-node": "~8.8.2", - "tslint": "^6.1.3", + "tslib": "^2.3.1", + "tslint": "~6.1.3", "tslint-no-circular-imports": "~0.7.0", - "typescript": "3.5.3" + "typescript": "4.4.2" }, "scripts": { "lint": "tslint -p tsconfig_tslint.json", diff --git a/tfjs-automl/package.json 
b/tfjs-automl/package.json index d6ab2e7dbbf..f7b8b68e0af 100644 --- a/tfjs-automl/package.json +++ b/tfjs-automl/package.json @@ -36,8 +36,8 @@ "karma": "~6.3.16", "karma-browserstack-launcher": "~1.6.0", "karma-chrome-launcher": "~2.2.0", - "karma-firefox-launcher": "~1.1.0", "karma-commonjs": "^1.0.0", + "karma-firefox-launcher": "~1.1.0", "karma-jasmine": "~2.0.0", "karma-safari-launcher": "~1.0.0", "karma-typescript": "~5.5.1", @@ -53,7 +53,7 @@ "ts-node": "^8.8.2", "tslint": "~6.1.3", "tslint-no-circular-imports": "^0.7.0", - "typescript": "3.5.3", + "typescript": "4.4.2", "yalc": "~1.0.0-pre.21" }, "peerDependencies": { diff --git a/tfjs-automl/yarn.lock b/tfjs-automl/yarn.lock index 1685fe71bc9..eba92f870f1 100644 --- a/tfjs-automl/yarn.lock +++ b/tfjs-automl/yarn.lock @@ -3648,10 +3648,10 @@ type-is@~1.6.17: media-typer "0.3.0" mime-types "~2.1.24" -typescript@3.5.3: - version "3.5.3" - resolved "https://registry.yarnpkg.com/typescript/-/typescript-3.5.3.tgz#c830f657f93f1ea846819e929092f5fe5983e977" - integrity sha512-ACzBtm/PhXBDId6a6sDJfroT2pOWt/oOnk4/dElG5G33ZL776N3Y6/6bKZJBFpd+b05F3Ct9qDjMeJmRWtE2/g== +typescript@4.4.2: + version "4.4.2" + resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.4.2.tgz#6d618640d430e3569a1dfb44f7d7e600ced3ee86" + integrity sha512-gzP+t5W4hdy4c+68bfcv0t400HVJMMd2+H9B7gae1nQlBzCqvrXX+6GL/b3GAgyTH966pzrZ70/fRjwAtZksSQ== ua-parser-js@^0.7.30: version "0.7.31" diff --git a/tfjs-backend-cpu/package.json b/tfjs-backend-cpu/package.json index 56404879e70..0813e3acb00 100644 --- a/tfjs-backend-cpu/package.json +++ b/tfjs-backend-cpu/package.json @@ -42,7 +42,7 @@ "rollup-plugin-terser": "~5.3.0", "rollup-plugin-visualizer": "~3.3.2", "ts-node": "~8.8.2", - "typescript": "3.5.3", + "typescript": "4.4.2", "yalc": "~1.0.0-pre.50" }, "scripts": { diff --git a/tfjs-backend-cpu/yarn.lock b/tfjs-backend-cpu/yarn.lock index 14fbcfc98ef..5f5f643357f 100644 --- a/tfjs-backend-cpu/yarn.lock +++ b/tfjs-backend-cpu/yarn.lock @@ -3126,10 +3126,10 @@ typedarray-to-buffer@^3.1.5: dependencies: is-typedarray "^1.0.0" -typescript@3.5.3: - version "3.5.3" - resolved "https://registry.yarnpkg.com/typescript/-/typescript-3.5.3.tgz#c830f657f93f1ea846819e929092f5fe5983e977" - integrity sha512-ACzBtm/PhXBDId6a6sDJfroT2pOWt/oOnk4/dElG5G33ZL776N3Y6/6bKZJBFpd+b05F3Ct9qDjMeJmRWtE2/g== +typescript@4.4.2: + version "4.4.2" + resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.4.2.tgz#6d618640d430e3569a1dfb44f7d7e600ced3ee86" + integrity sha512-gzP+t5W4hdy4c+68bfcv0t400HVJMMd2+H9B7gae1nQlBzCqvrXX+6GL/b3GAgyTH966pzrZ70/fRjwAtZksSQ== ua-parser-js@^0.7.30: version "0.7.31" diff --git a/tfjs-backend-wasm/package.json b/tfjs-backend-wasm/package.json index ead49f53de9..c4a0bfd74f3 100644 --- a/tfjs-backend-wasm/package.json +++ b/tfjs-backend-wasm/package.json @@ -74,9 +74,10 @@ "rollup-plugin-terser": "~7.0.2", "rollup-plugin-visualizer": "~3.3.2", "ts-node": "~8.8.2", + "tslib": "^2.3.1", "tslint": "~6.1.3", "tslint-no-circular-imports": "~0.7.0", - "typescript": "3.5.3", + "typescript": "4.4.2", "yalc": "~1.0.0-pre.50" }, "license": "Apache-2.0", diff --git a/tfjs-backend-wasm/yarn.lock b/tfjs-backend-wasm/yarn.lock index 9d29d89691c..2a962a47264 100644 --- a/tfjs-backend-wasm/yarn.lock +++ b/tfjs-backend-wasm/yarn.lock @@ -3395,6 +3395,11 @@ tslib@^1.13.0, tslib@^1.8.1: resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.14.1.tgz#cf2d38bdc34a134bcaf1091c41f6619e2f672d00" integrity 
sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg== +tslib@^2.3.1: + version "2.3.1" + resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.3.1.tgz#e8a335add5ceae51aa261d32a490158ef042ef01" + integrity sha512-77EbyPPpMz+FRFRuAFlWMtmgUWGe9UOG2Z25NqCwiIjRhOf5iKGuzSe5P2w1laq+FkRy4p+PCuVkJSGkzTEKVw== + tslint-no-circular-imports@~0.7.0: version "0.7.0" resolved "https://registry.yarnpkg.com/tslint-no-circular-imports/-/tslint-no-circular-imports-0.7.0.tgz#9df0a15654d66b172e0b7843eed073fa5ae99b5f" @@ -3439,10 +3444,10 @@ type-is@~1.6.17: media-typer "0.3.0" mime-types "~2.1.24" -typescript@3.5.3: - version "3.5.3" - resolved "https://registry.yarnpkg.com/typescript/-/typescript-3.5.3.tgz#c830f657f93f1ea846819e929092f5fe5983e977" - integrity sha512-ACzBtm/PhXBDId6a6sDJfroT2pOWt/oOnk4/dElG5G33ZL776N3Y6/6bKZJBFpd+b05F3Ct9qDjMeJmRWtE2/g== +typescript@4.4.2: + version "4.4.2" + resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.4.2.tgz#6d618640d430e3569a1dfb44f7d7e600ced3ee86" + integrity sha512-gzP+t5W4hdy4c+68bfcv0t400HVJMMd2+H9B7gae1nQlBzCqvrXX+6GL/b3GAgyTH966pzrZ70/fRjwAtZksSQ== ua-parser-js@^0.7.30: version "0.7.31" diff --git a/tfjs-backend-webgl/package.json b/tfjs-backend-webgl/package.json index 306e3928376..336407d4bf5 100644 --- a/tfjs-backend-webgl/package.json +++ b/tfjs-backend-webgl/package.json @@ -42,7 +42,7 @@ "rollup-plugin-terser": "~7.0.2", "rollup-plugin-visualizer": "~3.3.2", "ts-node": "~7.0.0", - "typescript": "3.5.3", + "typescript": "4.4.2", "yalc": "~1.0.0-pre.50" }, "scripts": { diff --git a/tfjs-backend-webgl/yarn.lock b/tfjs-backend-webgl/yarn.lock index 5a46d2d88ec..44a51c16e4e 100644 --- a/tfjs-backend-webgl/yarn.lock +++ b/tfjs-backend-webgl/yarn.lock @@ -970,11 +970,6 @@ resolved "https://registry.npmjs.org/@types/jasmine/-/jasmine-3.0.0.tgz#9a6b6755a02fcd6baa088a767557709c79728f98" integrity sha512-yeQ81bQ46gOfj+AQLp5/x0Kylq6lz9d5a82Vo5JS63rDn1ctoItKcwrcKEM1wGsjqA4SrYkzzIHo8dbq8RhG5w== -"@types/long@^4.0.1": - version "4.0.1" - resolved "https://registry.yarnpkg.com/@types/long/-/long-4.0.1.tgz#459c65fa1867dafe6a8f322c4c51695663cc55e9" - integrity sha512-5tXH6Bx/kNGd3MgffdmP4dy2Z+G4eaXw0SE81Tq3BNadtnMR5/ySMzX4SLEzHJzSmPNn4HIdpQsBvXMUykr58w== - "@types/node@*", "@types/node@>=10.0.0": version "15.12.4" resolved "https://registry.npmjs.org/@types/node/-/node-15.12.4.tgz#e1cf817d70a1e118e81922c4ff6683ce9d422e26" @@ -2626,11 +2621,6 @@ log4js@^6.3.0, log4js@^6.4.1: rfdc "^1.3.0" streamroller "^3.0.2" -long@4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/long/-/long-4.0.0.tgz#9a7b71cfb7d361a194ea555241c92f7468d5bf28" - integrity sha512-XsP+KhQif4bjX1kbuSiySJFNAehNxgLb6hPRGJ9QsUr8ajHkuXGdrHmFUTUUXhDwVX2R5bY4JNZEwbUiMhV+MA== - magic-string@^0.25.2, magic-string@^0.25.7: version "0.25.7" resolved "https://registry.npmjs.org/magic-string/-/magic-string-0.25.7.tgz#3f497d6fd34c669c6798dcb821f2ef31f5445051" @@ -2763,13 +2753,6 @@ nice-try@^1.0.4: resolved "https://registry.npmjs.org/nice-try/-/nice-try-1.0.5.tgz#a3378a7696ce7d223e88fc9b764bd7ef1089e366" integrity sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ== -node-fetch@~2.6.1: - version "2.6.7" - resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.7.tgz#24de9fba827e3b4ae44dc8b20256a379160052ad" - integrity sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ== - dependencies: - whatwg-url "^5.0.0" - node-releases@^1.1.71: version "1.1.73" 
resolved "https://registry.npmjs.org/node-releases/-/node-releases-1.1.73.tgz#dd4e81ddd5277ff846b80b52bb40c49edf7a7b20" @@ -3593,11 +3576,6 @@ toidentifier@1.0.0: resolved "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.0.tgz#7e1be3470f1e77948bc43d94a3c8f4d7752ba553" integrity sha512-yaOH/Pk/VEhBWWTlhI+qXxDFXlejDGcQipMlyxda9nthulaxLZUNcUqFxokp0vcYnvteJln5FNQDRrxj3YcbVw== -tr46@~0.0.3: - version "0.0.3" - resolved "https://registry.yarnpkg.com/tr46/-/tr46-0.0.3.tgz#8184fd347dac9cdc185992f3a6622e14b9d9ab6a" - integrity sha1-gYT9NH2snNwYWZLzpmIuFLnZq2o= - ts-node@~7.0.0: version "7.0.1" resolved "https://registry.npmjs.org/ts-node/-/ts-node-7.0.1.tgz#9562dc2d1e6d248d24bc55f773e3f614337d9baf" @@ -3625,10 +3603,10 @@ type-is@~1.6.17: media-typer "0.3.0" mime-types "~2.1.24" -typescript@3.5.3: - version "3.5.3" - resolved "https://registry.npmjs.org/typescript/-/typescript-3.5.3.tgz#c830f657f93f1ea846819e929092f5fe5983e977" - integrity sha512-ACzBtm/PhXBDId6a6sDJfroT2pOWt/oOnk4/dElG5G33ZL776N3Y6/6bKZJBFpd+b05F3Ct9qDjMeJmRWtE2/g== +typescript@4.4.2: + version "4.4.2" + resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.4.2.tgz#6d618640d430e3569a1dfb44f7d7e600ced3ee86" + integrity sha512-gzP+t5W4hdy4c+68bfcv0t400HVJMMd2+H9B7gae1nQlBzCqvrXX+6GL/b3GAgyTH966pzrZ70/fRjwAtZksSQ== ua-parser-js@^0.7.30: version "0.7.31" @@ -3743,19 +3721,6 @@ wcwidth@^1.0.1: dependencies: defaults "^1.0.3" -webidl-conversions@^3.0.0: - version "3.0.1" - resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-3.0.1.tgz#24534275e2a7bc6be7bc86611cc16ae0a5654871" - integrity sha1-JFNCdeKnvGvnvIZhHMFq4KVlSHE= - -whatwg-url@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-5.0.0.tgz#966454e8765462e37644d3626f6742ce8b70965d" - integrity sha1-lmRU6HZUYuN2RNNib2dCzotwll0= - dependencies: - tr46 "~0.0.3" - webidl-conversions "^3.0.0" - which-boxed-primitive@^1.0.2: version "1.0.2" resolved "https://registry.npmjs.org/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz#13757bc89b209b049fe5d86430e21cf40a89a8e6" diff --git a/tfjs-converter/package.json b/tfjs-converter/package.json index 9adb6941e92..86da06bd74b 100644 --- a/tfjs-converter/package.json +++ b/tfjs-converter/package.json @@ -50,7 +50,7 @@ "rollup-plugin-visualizer": "~3.3.2", "ts-morph": "^7.1.3", "ts-node": "~8.8.2", - "typescript": "3.5.3", + "typescript": "4.4.2", "yalc": "~1.0.0-pre.50" }, "scripts": { diff --git a/tfjs-converter/scripts/kernels_to_ops.ts b/tfjs-converter/scripts/kernels_to_ops.ts index bdbbce3a26f..ca689b6a535 100644 --- a/tfjs-converter/scripts/kernels_to_ops.ts +++ b/tfjs-converter/scripts/kernels_to_ops.ts @@ -98,16 +98,16 @@ function getKernelMappingForFile(source: SourceFile) { const callExprs = clausePart.getDescendantsOfKind(SyntaxKind.CallExpression); const tfOpsCallExprs = - callExprs.filter(expr => expr.getText().match(/tfOps/)); + callExprs.filter(expr => expr.getText().match(/ops/)); const tfSymbols: Set = new Set(); for (const tfOpsCall of tfOpsCallExprs) { const tfOpsCallStr = tfOpsCall.getText(); - const functionCallMatcher = /(tfOps\.([\w\.]*)\()/g; + const functionCallMatcher = /(ops\.([\w\.]*)\()/g; const matches = tfOpsCallStr.match(functionCallMatcher); if (matches != null && matches.length > 0) { for (const match of matches) { // extract the method name (and any namespaces used to call it) - const symbolMatcher = /(tfOps\.([\w\.]*)\()/; + const symbolMatcher = /(ops\.([\w\.]*)\()/; const symbol = match.match(symbolMatcher)[2]; 
tfSymbols.add(symbol); } diff --git a/tfjs-converter/src/BUILD.bazel b/tfjs-converter/src/BUILD.bazel index fd258aeaa4e..d6ced5f006f 100644 --- a/tfjs-converter/src/BUILD.bazel +++ b/tfjs-converter/src/BUILD.bazel @@ -21,6 +21,7 @@ package(default_visibility = ["//visibility:public"]) TEST_SRCS = [ "**/*_test.ts", "run_tests.ts", + "operations/executors/spy_ops.ts", ] # Used for test-snippets diff --git a/tfjs-converter/src/executor/graph_model.ts b/tfjs-converter/src/executor/graph_model.ts index 91c8d53489f..e47e046399c 100644 --- a/tfjs-converter/src/executor/graph_model.ts +++ b/tfjs-converter/src/executor/graph_model.ts @@ -44,6 +44,7 @@ export class GraphModel implements InferenceModel { private initializer: GraphExecutor; private resourceManager: ResourceManager; private signature: tensorflow.ISignatureDef; + private readonly io: typeof io; // Returns the version information for the tensorflow model GraphDef. get modelVersion(): string { @@ -89,7 +90,8 @@ export class GraphModel implements InferenceModel { */ constructor( private modelUrl: string|io.IOHandler, - private loadOptions: io.LoadOptions = {}) { + private loadOptions: io.LoadOptions = {}, tfio = io) { + this.io = tfio; if (loadOptions == null) { this.loadOptions = {}; } @@ -102,13 +104,16 @@ export class GraphModel implements InferenceModel { // Path is an IO Handler. this.handler = path as io.IOHandler; } else if (this.loadOptions.requestInit != null) { - this.handler = io.browserHTTPRequest(path as string, this.loadOptions); + this.handler = + this.io.browserHTTPRequest(path as string, this.loadOptions); } else { - const handlers = io.getLoadHandlers(path as string, this.loadOptions); + const handlers = + this.io.getLoadHandlers(path as string, this.loadOptions); if (handlers.length === 0) { // For backward compatibility: if no load handler can be found, // assume it is a relative http path. - handlers.push(io.browserHTTPRequest(path as string, this.loadOptions)); + handlers.push( + this.io.browserHTTPRequest(path as string, this.loadOptions)); } else if (handlers.length > 1) { throw new Error( `Found more than one (${handlers.length}) load handlers for ` + @@ -156,8 +161,8 @@ export class GraphModel implements InferenceModel { this.signature = signature; this.version = `${graph.versions.producer}.${graph.versions.minConsumer}`; - const weightMap = - io.decodeWeights(this.artifacts.weightData, this.artifacts.weightSpecs); + const weightMap = this.io.decodeWeights( + this.artifacts.weightData, this.artifacts.weightSpecs); this.executor = new GraphExecutor( OperationMapper.Instance.transformGraph(graph, this.signature)); this.executor.weightMap = this.convertTensorMapToTensorsMap(weightMap); @@ -228,7 +233,7 @@ export class GraphModel implements InferenceModel { async save(handlerOrURL: io.IOHandler|string, config?: io.SaveConfig): Promise { if (typeof handlerOrURL === 'string') { - const handlers = io.getSaveHandlers(handlerOrURL); + const handlers = this.io.getSaveHandlers(handlerOrURL); if (handlers.length === 0) { throw new Error( `Cannot find any save handlers for URL '${handlerOrURL}'`); @@ -437,8 +442,8 @@ export class GraphModel implements InferenceModel { * @doc {heading: 'Models', subheading: 'Loading'} */ export async function loadGraphModel( - modelUrl: string|io.IOHandler, - options: io.LoadOptions = {}): Promise { + modelUrl: string|io.IOHandler, options: io.LoadOptions = {}, + tfio = io): Promise { if (modelUrl == null) { throw new Error( 'modelUrl in loadGraphModel() cannot be null. 
Please provide a url ' + @@ -456,7 +461,7 @@ export async function loadGraphModel( modelUrl = `${modelUrl}${DEFAULT_MODEL_NAME}${TFHUB_SEARCH_PARAM}`; } } - const model = new GraphModel(modelUrl, options); + const model = new GraphModel(modelUrl, options, tfio); await model.load(); return model; } diff --git a/tfjs-converter/src/executor/graph_model_test.ts b/tfjs-converter/src/executor/graph_model_test.ts index 6ccd2bf193d..a82f0af7b02 100644 --- a/tfjs-converter/src/executor/graph_model_test.ts +++ b/tfjs-converter/src/executor/graph_model_test.ts @@ -23,6 +23,7 @@ import {deregisterOp, registerOp} from '../operations/custom_op/register'; import {GraphNode} from '../operations/types'; import {GraphModel, loadGraphModel} from './graph_model'; +import {RecursiveSpy, spyOnAllFunctions} from '../operations/executors/spy_ops'; const HOST = 'http://example.org'; const MODEL_URL = `${HOST}/model.json`; @@ -368,6 +369,12 @@ describe('loadSync', () => { }); describe('loadGraphModel', () => { + let spyIo: RecursiveSpy; + + beforeEach(() => { + spyIo = spyOnAllFunctions(io); + }); + it('Pass a custom io handler', async () => { const customLoader: tfc.io.IOHandler = { load: async () => { @@ -397,22 +404,25 @@ describe('loadGraphModel', () => { it('Pass a fetchFunc', async () => { const fetchFunc = () => {}; - spyOn(tfc.io, 'getLoadHandlers').and.returnValue([ + spyIo.getLoadHandlers.and.returnValue([ CUSTOM_HTTP_MODEL_LOADER ]); - await loadGraphModel(MODEL_URL, {fetchFunc}); - expect(tfc.io.getLoadHandlers).toHaveBeenCalledWith(MODEL_URL, {fetchFunc}); + await loadGraphModel(MODEL_URL, {fetchFunc}, spyIo); + expect(spyIo.getLoadHandlers).toHaveBeenCalledWith(MODEL_URL, {fetchFunc}); }); }); describe('Model', () => { + let spyIo: RecursiveSpy; + beforeEach(() => { - model = new GraphModel(MODEL_URL); + spyIo = spyOnAllFunctions(io); + model = new GraphModel(MODEL_URL, undefined, spyIo); }); describe('custom model', () => { beforeEach(() => { - spyOn(tfc.io, 'getLoadHandlers').and.returnValue([ + spyIo.getLoadHandlers.and.returnValue([ CUSTOM_HTTP_MODEL_LOADER ]); registerOp('CustomOp', (nodeValue: GraphNode) => { @@ -454,11 +464,10 @@ describe('Model', () => { describe('simple model', () => { beforeEach(() => { - spyOn(tfc.io, 'getLoadHandlers').and.returnValue([ + spyIo.getLoadHandlers.and.returnValue([ SIMPLE_HTTP_MODEL_LOADER ]); - spyOn(tfc.io, 'browserHTTPRequest') - .and.returnValue(SIMPLE_HTTP_MODEL_LOADER); + spyIo.browserHTTPRequest.and.returnValue(SIMPLE_HTTP_MODEL_LOADER); }); it('load', async () => { const loaded = await model.load(); @@ -591,7 +600,7 @@ describe('Model', () => { describe('dispose', () => { it('should dispose the weights', async () => { const numOfTensors = tfc.memory().numTensors; - model = new GraphModel(MODEL_URL); + model = new GraphModel(MODEL_URL, undefined, spyIo); await model.load(); model.dispose(); @@ -609,7 +618,7 @@ describe('Model', () => { describe('relative path', () => { beforeEach(() => { - model = new GraphModel(RELATIVE_MODEL_URL); + model = new GraphModel(RELATIVE_MODEL_URL, undefined, spyIo); }); it('load', async () => { @@ -619,14 +628,14 @@ describe('Model', () => { }); it('should loadGraphModel', async () => { - const model = await loadGraphModel(MODEL_URL); + const model = await loadGraphModel(MODEL_URL, undefined, spyIo); expect(model).not.toBeUndefined(); }); it('should loadGraphModel with request options', async () => { const model = await loadGraphModel( - MODEL_URL, {requestInit: {credentials: 'include'}}); - 
expect(tfc.io.browserHTTPRequest).toHaveBeenCalledWith(MODEL_URL, { + MODEL_URL, {requestInit: {credentials: 'include'}}, spyIo); + expect(spyIo.browserHTTPRequest).toHaveBeenCalledWith(MODEL_URL, { requestInit: {credentials: 'include'} }); expect(model).not.toBeUndefined(); @@ -634,7 +643,7 @@ describe('Model', () => { it('should call loadGraphModel for TfHub Module', async () => { const url = `${HOST}/model/1`; - const model = await loadGraphModel(url, {fromTFHub: true}); + const model = await loadGraphModel(url, {fromTFHub: true}, spyIo); expect(model).toBeDefined(); }); @@ -656,11 +665,10 @@ describe('Model', () => { describe('control flow model', () => { beforeEach(() => { - spyOn(tfc.io, 'getLoadHandlers').and.returnValue([ + spyIo.getLoadHandlers.and.returnValue([ CONTROL_FLOW_HTTP_MODEL_LOADER ]); - spyOn(tfc.io, 'browserHTTPRequest') - .and.returnValue(CONTROL_FLOW_HTTP_MODEL_LOADER); + spyIo.browserHTTPRequest.and.returnValue(CONTROL_FLOW_HTTP_MODEL_LOADER); }); describe('save', () => { @@ -747,11 +755,10 @@ describe('Model', () => { }; describe('dynamic shape model', () => { beforeEach(() => { - spyOn(tfc.io, 'getLoadHandlers').and.returnValue([ + spyIo.getLoadHandlers.and.returnValue([ DYNAMIC_HTTP_MODEL_LOADER ]); - spyOn(tfc.io, 'browserHTTPRequest') - .and.returnValue(DYNAMIC_HTTP_MODEL_LOADER); + spyIo.browserHTTPRequest.and.returnValue(DYNAMIC_HTTP_MODEL_LOADER); }); it('should throw error if call predict directly', async () => { @@ -792,11 +799,10 @@ describe('Model', () => { }); describe('dynamic shape model with metadata', () => { beforeEach(() => { - spyOn(tfc.io, 'getLoadHandlers').and.returnValue([ + spyIo.getLoadHandlers.and.returnValue([ DYNAMIC_HTTP_MODEL_NEW_LOADER ]); - spyOn(tfc.io, 'browserHTTPRequest') - .and.returnValue(DYNAMIC_HTTP_MODEL_NEW_LOADER); + spyIo.browserHTTPRequest.and.returnValue(DYNAMIC_HTTP_MODEL_NEW_LOADER); }); it('should be success if call executeAsync with signature key', @@ -818,11 +824,10 @@ describe('Model', () => { describe('Hashtable model', () => { beforeEach(() => { - spyOn(tfc.io, 'getLoadHandlers').and.returnValue([ + spyIo.getLoadHandlers.and.returnValue([ HASHTABLE_HTTP_MODEL_LOADER ]); - spyOn(tfc.io, 'browserHTTPRequest') - .and.returnValue(HASHTABLE_HTTP_MODEL_LOADER); + spyIo.browserHTTPRequest.and.returnValue(HASHTABLE_HTTP_MODEL_LOADER); }); it('should be successful if call executeAsync', async () => { await model.load(); diff --git a/tfjs-converter/src/operations/executors/arithmetic_executor.ts b/tfjs-converter/src/operations/executors/arithmetic_executor.ts index e69efeca892..35e8f590bfd 100644 --- a/tfjs-converter/src/operations/executors/arithmetic_executor.ts +++ b/tfjs-converter/src/operations/executors/arithmetic_executor.ts @@ -27,66 +27,66 @@ import {getParamValue} from './utils'; export const executeOp: InternalOpExecutor = (node: Node, tensorMap: NamedTensorsMap, - context: ExecutionContext): Tensor[] => { + context: ExecutionContext, ops = tfOps): Tensor[] => { switch (node.op) { case 'BiasAdd': case 'AddV2': case 'Add': { - return [tfOps.add( + return [ops.add( (getParamValue('a', node, tensorMap, context) as Tensor), getParamValue('b', node, tensorMap, context) as Tensor)]; } case 'AddN': { - return [tfOps.addN(( + return [ops.addN(( getParamValue('tensors', node, tensorMap, context) as Tensor[]))]; } case 'FloorMod': case 'Mod': - return [tfOps.mod( + return [ops.mod( getParamValue('a', node, tensorMap, context) as Tensor, getParamValue('b', node, tensorMap, context) as Tensor)]; case 'Mul': - return 
[tfOps.mul( + return [ops.mul( getParamValue('a', node, tensorMap, context) as Tensor, getParamValue('b', node, tensorMap, context) as Tensor)]; case 'RealDiv': case 'Div': { - return [tfOps.div( + return [ops.div( getParamValue('a', node, tensorMap, context) as Tensor, getParamValue('b', node, tensorMap, context) as Tensor)]; } case 'DivNoNan': { - return [tfOps.divNoNan( + return [ops.divNoNan( getParamValue('a', node, tensorMap, context) as Tensor, getParamValue('b', node, tensorMap, context) as Tensor)]; } case 'FloorDiv': { - return [tfOps.floorDiv( + return [ops.floorDiv( getParamValue('a', node, tensorMap, context) as Tensor, getParamValue('b', node, tensorMap, context) as Tensor)]; } case 'Sub': { - return [tfOps.sub( + return [ops.sub( getParamValue('a', node, tensorMap, context) as Tensor, getParamValue('b', node, tensorMap, context) as Tensor)]; } case 'Minimum': { - return [tfOps.minimum( + return [ops.minimum( getParamValue('a', node, tensorMap, context) as Tensor, getParamValue('b', node, tensorMap, context) as Tensor)]; } case 'Maximum': { - return [tfOps.maximum( + return [ops.maximum( getParamValue('a', node, tensorMap, context) as Tensor, getParamValue('b', node, tensorMap, context) as Tensor)]; } case 'Pow': { - return [tfOps.pow( + return [ops.pow( getParamValue('a', node, tensorMap, context) as Tensor, getParamValue('b', node, tensorMap, context) as Tensor)]; } case 'SquaredDifference': { - return [tfOps.squaredDifference( + return [ops.squaredDifference( getParamValue('a', node, tensorMap, context) as Tensor, getParamValue('b', node, tensorMap, context) as Tensor)]; } diff --git a/tfjs-converter/src/operations/executors/arithmetic_executor_test.ts b/tfjs-converter/src/operations/executors/arithmetic_executor_test.ts index eb23a1d2217..6b07ec6041b 100644 --- a/tfjs-converter/src/operations/executors/arithmetic_executor_test.ts +++ b/tfjs-converter/src/operations/executors/arithmetic_executor_test.ts @@ -23,7 +23,8 @@ import {ExecutionContext} from '../../executor/execution_context'; import {Node} from '../types'; import {executeOp} from './arithmetic_executor'; -import {createTensorAttr, createTensorsAttr} from './test_helper'; +import {createTensorAttr, createTensorsAttr, uncapitalize} from './test_helper'; +import {RecursiveSpy, spyOnAllFunctions} from './spy_ops'; describe('arithmetic', () => { let node: Node; @@ -46,37 +47,46 @@ describe('arithmetic', () => { }); describe('executeOp', () => { - ['Add', 'Mul', 'Div', 'Sub', 'Maximum', 'Minimum', 'Pow', - 'SquaredDifference', 'Mod', 'FloorDiv', 'DivNoNan'] - .forEach((op => { - it('should call tfOps.' + op, () => { - const spy = - spyOn(tfOps, op.charAt(0).toLowerCase() + op.slice(1) as 'add'); - node.op = op; - executeOp(node, {input1, input2}, context); + let spyOps: RecursiveSpy; + let spyOpsAsTfOps: typeof tfOps; - expect(spy).toHaveBeenCalledWith(input1[0], input2[0]); - }); - })); + beforeEach(() => { + spyOps = spyOnAllFunctions(tfOps); + spyOpsAsTfOps = spyOps as unknown as typeof tfOps; + }); + + (['Add', 'Mul', 'Div', 'Sub', 'Maximum', 'Minimum', 'Pow', + 'SquaredDifference', 'Mod', 'FloorDiv', 'DivNoNan'] as const) + .forEach((op => { + it('should call tfOps.' 
+ op, () => { + node.op = op; + executeOp(node, {input1, input2}, context, spyOpsAsTfOps); + + expect(spyOps[uncapitalize(op)]) + .toHaveBeenCalledWith(input1[0], input2[0]); + }); + })); it('AddV2', async () => { - const spy = spyOn(tfOps, 'add').and.callThrough(); + node.op = 'AddV2'; - const res = executeOp(node, {input1, input2}, context) as Tensor[]; - expect(spy).toHaveBeenCalledWith(input1[0], input2[0]); + const res = executeOp(node, {input1, input2}, context, + spyOpsAsTfOps) as Tensor[]; + expect(spyOps.add).toHaveBeenCalledWith(input1[0], input2[0]); expect(res[0].dtype).toBe('float32'); expect(res[0].shape).toEqual([]); test_util.expectArraysClose(await res[0].data(), 2); }); it('AddN', async () => { - const spy = spyOn(tfOps, 'addN').and.callThrough(); node.op = 'AddN'; node.inputParams = {tensors: createTensorsAttr(0, 0)}; node.inputNames = ['input1', 'input2', 'input3']; const res = - executeOp(node, {input1, input2, input3}, context) as Tensor[]; - expect(spy).toHaveBeenCalledWith([input1[0], input2[0], input3[0]]); + executeOp(node, {input1, input2, input3}, context, + spyOpsAsTfOps) as Tensor[]; + expect(spyOps.addN) + .toHaveBeenCalledWith([input1[0], input2[0], input3[0]]); expect(res[0].dtype).toBe('float32'); expect(res[0].shape).toEqual([]); test_util.expectArraysClose(await res[0].data(), [6]); diff --git a/tfjs-converter/src/operations/executors/basic_math_executor.ts b/tfjs-converter/src/operations/executors/basic_math_executor.ts index 0751d95a2aa..6b0db9aa173 100644 --- a/tfjs-converter/src/operations/executors/basic_math_executor.ts +++ b/tfjs-converter/src/operations/executors/basic_math_executor.ts @@ -27,153 +27,153 @@ import {getParamValue, getTensor} from './utils'; export const executeOp: InternalOpExecutor = (node: Node, tensorMap: NamedTensorsMap, - context: ExecutionContext): Tensor[] => { + context: ExecutionContext, ops = tfOps): Tensor[] => { switch (node.op) { case 'Abs': case 'ComplexAbs': - return [tfOps.abs( + return [ops.abs( getParamValue('x', node, tensorMap, context) as Tensor)]; case 'Acos': - return [tfOps.acos( + return [ops.acos( getParamValue('x', node, tensorMap, context) as Tensor)]; case 'Acosh': - return [tfOps.acosh( + return [ops.acosh( getParamValue('x', node, tensorMap, context) as Tensor)]; case 'Asin': - return [tfOps.asin( + return [ops.asin( getParamValue('x', node, tensorMap, context) as Tensor)]; case 'Asinh': - return [tfOps.asinh( + return [ops.asinh( getParamValue('x', node, tensorMap, context) as Tensor)]; case 'Atan': - return [tfOps.atan( + return [ops.atan( getParamValue('x', node, tensorMap, context) as Tensor)]; case 'Atan2': - return [tfOps.atan2( + return [ops.atan2( getParamValue('x', node, tensorMap, context) as Tensor, getParamValue('y', node, tensorMap, context) as Tensor)]; case 'Atanh': - return [tfOps.atanh( + return [ops.atanh( getParamValue('x', node, tensorMap, context) as Tensor)]; case 'Ceil': - return [tfOps.ceil( + return [ops.ceil( getParamValue('x', node, tensorMap, context) as Tensor)]; case 'Complex': - return [tfOps.complex( + return [ops.complex( getParamValue('real', node, tensorMap, context) as Tensor, getParamValue('imag', node, tensorMap, context) as Tensor)]; case 'Cos': - return [tfOps.cos( + return [ops.cos( getParamValue('x', node, tensorMap, context) as Tensor)]; case 'Cosh': - return [tfOps.cosh( + return [ops.cosh( getParamValue('x', node, tensorMap, context) as Tensor)]; case 'Elu': - return [tfOps.elu( + return [ops.elu( getParamValue('x', node, tensorMap, context) as Tensor)]; 
case 'Erf': - return [tfOps.erf( + return [ops.erf( getParamValue('x', node, tensorMap, context) as Tensor)]; case 'Exp': - return [tfOps.exp( + return [ops.exp( getParamValue('x', node, tensorMap, context) as Tensor)]; case 'Expm1': { - return [tfOps.expm1( + return [ops.expm1( getParamValue('x', node, tensorMap, context) as Tensor)]; } case 'Floor': - return [tfOps.floor( + return [ops.floor( getParamValue('x', node, tensorMap, context) as Tensor)]; case 'Log': - return [tfOps.log( + return [ops.log( getParamValue('x', node, tensorMap, context) as Tensor)]; case 'Log1p': { - return [tfOps.log1p( + return [ops.log1p( getParamValue('x', node, tensorMap, context) as Tensor)]; } case 'Imag': - return [tfOps.imag( + return [ops.imag( getParamValue('x', node, tensorMap, context) as Tensor)]; case 'Neg': - return [tfOps.neg( + return [ops.neg( getParamValue('x', node, tensorMap, context) as Tensor)]; case 'Reciprocal': { - return [tfOps.reciprocal( + return [ops.reciprocal( getParamValue('x', node, tensorMap, context) as Tensor)]; } case 'Real': - return [tfOps.real( + return [ops.real( getParamValue('x', node, tensorMap, context) as Tensor)]; case 'Relu': - return [tfOps.relu( + return [ops.relu( getParamValue('x', node, tensorMap, context) as Tensor)]; case 'Round': { - return [tfOps.round( + return [ops.round( getParamValue('x', node, tensorMap, context) as Tensor)]; } case 'Selu': - return [tfOps.selu( + return [ops.selu( getParamValue('x', node, tensorMap, context) as Tensor)]; case 'Sigmoid': - return [tfOps.sigmoid( + return [ops.sigmoid( getParamValue('x', node, tensorMap, context) as Tensor)]; case 'Sin': - return [tfOps.sin( + return [ops.sin( getParamValue('x', node, tensorMap, context) as Tensor)]; case 'Sign': { - return [tfOps.sign( + return [ops.sign( getParamValue('x', node, tensorMap, context) as Tensor)]; } case 'Sinh': { - return [tfOps.sinh( + return [ops.sinh( getParamValue('x', node, tensorMap, context) as Tensor)]; } case 'Softplus': { - return [tfOps.softplus( + return [ops.softplus( getParamValue('x', node, tensorMap, context) as Tensor)]; } case 'Sqrt': { - return [tfOps.sqrt( + return [ops.sqrt( getParamValue('x', node, tensorMap, context) as Tensor)]; } case 'Square': { - return [tfOps.square( + return [ops.square( getParamValue('x', node, tensorMap, context) as Tensor)]; } case 'Tanh': { - return [tfOps.tanh( + return [ops.tanh( getParamValue('x', node, tensorMap, context) as Tensor)]; } case 'Tan': - return [tfOps.tan( + return [ops.tan( getParamValue('x', node, tensorMap, context) as Tensor)]; case 'ClipByValue': - return [tfOps.clipByValue( + return [ops.clipByValue( getParamValue('x', node, tensorMap, context) as Tensor, getParamValue('clipValueMin', node, tensorMap, context) as number, getParamValue('clipValueMax', node, tensorMap, context) as number)]; case 'Relu6': - return [tfOps.relu6( + return [ops.relu6( getParamValue('x', node, tensorMap, context) as Tensor)]; case 'Rsqrt': - return [tfOps.rsqrt( + return [ops.rsqrt( getTensor(node.inputNames[0], tensorMap, context))]; case 'Prod': - return [tfOps.prod( + return [ops.prod( getParamValue('x', node, tensorMap, context) as Tensor, getParamValue('axes', node, tensorMap, context) as number[])]; case 'LeakyRelu': - return [tfOps.leakyRelu( + return [ops.leakyRelu( getParamValue('x', node, tensorMap, context) as Tensor, getParamValue('alpha', node, tensorMap, context) as number)]; case 'Prelu': - return [tfOps.prelu( + return [ops.prelu( getParamValue('x', node, tensorMap, context) as Tensor, 
getParamValue('alpha', node, tensorMap, context) as Tensor)]; case 'IsNan': - return [tfOps.isNaN( + return [ops.isNaN( getTensor(node.inputNames[0], tensorMap, context))]; default: throw TypeError(`Node type ${node.op} is not implemented`); diff --git a/tfjs-converter/src/operations/executors/basic_math_executor_test.ts b/tfjs-converter/src/operations/executors/basic_math_executor_test.ts index 124b804e8a0..138d0a19e50 100644 --- a/tfjs-converter/src/operations/executors/basic_math_executor_test.ts +++ b/tfjs-converter/src/operations/executors/basic_math_executor_test.ts @@ -23,7 +23,8 @@ import * as basic_math from '../op_list/basic_math'; import {Node} from '../types'; import {executeOp} from './basic_math_executor'; -import {createNumberAttr, createNumberAttrFromIndex, createNumericArrayAttrFromIndex, createTensorAttr, validateParam} from './test_helper'; +import {RecursiveSpy, spyOnAllFunctions} from './spy_ops'; +import {createNumberAttr, createNumberAttrFromIndex, createNumericArrayAttrFromIndex, createTensorAttr, uncapitalize, validateParam} from './test_helper'; describe('basic math', () => { let node: Node; @@ -44,18 +45,28 @@ describe('basic math', () => { }); describe('executeOp', () => { - ['Abs', 'Acos', 'Asin', 'Atan', 'Ceil', 'Cos', 'Cosh', 'Elu', 'Exp', - 'Floor', 'Log', 'Imag', 'Neg', 'Real', 'Relu', 'Selu', 'Sigmoid', 'Sin', - 'Sinh', 'Sqrt', 'Square', 'Tanh', 'Tan', 'Sign', 'Round', 'Expm1', 'Log1p', - 'Reciprocal', 'Softplus', 'Asinh', 'Acosh', 'Atanh', 'Erf'] + let spyOps: RecursiveSpy; + let spyOpsAsTfOps: typeof tfOps; + + beforeEach(() => { + spyOps = spyOnAllFunctions(tfOps); + spyOpsAsTfOps = spyOps as unknown as typeof tfOps; + }); + + ([ + 'Abs', 'Acos', 'Asin', 'Atan', 'Ceil', 'Cos', 'Cosh', + 'Elu', 'Exp', 'Floor', 'Log', 'Imag', 'Neg', 'Real', + 'Relu', 'Selu', 'Sigmoid', 'Sin', 'Sinh', 'Sqrt', 'Square', + 'Tanh', 'Tan', 'Sign', 'Round', 'Expm1', 'Log1p', 'Reciprocal', + 'Softplus', 'Asinh', 'Acosh', 'Atanh', 'Erf' + ] as const ) .forEach(op => { it('should call tfOps.' 
+ op, () => { - const spy = - spyOn(tfOps, op.charAt(0).toLowerCase() + op.slice(1) as 'abs'); node.op = op; - executeOp(node, {input1}, context); + spyOps[uncapitalize(op)].and.returnValue({}); + executeOp(node, {input1}, context, spyOpsAsTfOps); - expect(spy).toHaveBeenCalledWith(input1[0]); + expect(spyOps[uncapitalize(op)]).toHaveBeenCalledWith(input1[0]); }); it('should match op def', () => { node.op = op; @@ -65,12 +76,11 @@ describe('basic math', () => { }); describe('Relu6', () => { it('should call tfOps.relu6', () => { - spyOn(tfOps, 'relu6'); node.op = 'Relu6'; - executeOp(node, {input1}, context); + executeOp(node, {input1}, context, spyOpsAsTfOps); - expect(tfOps.relu6).toHaveBeenCalledWith(input1[0]); + expect(spyOps.relu6).toHaveBeenCalledWith(input1[0]); }); it('should match op def', () => { node.op = 'Relu6'; @@ -80,16 +90,15 @@ describe('basic math', () => { }); describe('ClipByValue', () => { it('should call tfOps.clipByValue', () => { - spyOn(tfOps, 'clipByValue'); node.op = 'ClipByValue'; node.inputNames = ['input1', 'input2', 'input3']; node.inputParams['clipValueMin'] = createNumberAttrFromIndex(1); node.inputParams['clipValueMax'] = createNumberAttrFromIndex(2); const input2 = [tfOps.scalar(2)]; const input3 = [tfOps.scalar(3)]; - executeOp(node, {input1, input2, input3}, context); + executeOp(node, {input1, input2, input3}, context, spyOpsAsTfOps); - expect(tfOps.clipByValue).toHaveBeenCalledWith(input1[0], 2, 3); + expect(spyOps.clipByValue).toHaveBeenCalledWith(input1[0], 2, 3); }); it('should match op def', () => { node.op = 'ClipByValue'; @@ -101,14 +110,14 @@ describe('basic math', () => { }); describe('Prod', () => { it('should call tfOps.prod', () => { - spyOn(tfOps, 'prod'); node.op = 'Prod'; node.inputParams['axes'] = createNumericArrayAttrFromIndex(1); node.inputNames = ['input1', 'input2']; const input2 = [tfOps.tensor1d([2])]; - executeOp(node, {input1, input2}, context); + spyOps.prod.and.returnValue({}); + executeOp(node, {input1, input2}, context, spyOpsAsTfOps); - expect(tfOps.prod).toHaveBeenCalledWith(input1[0], [2]); + expect(spyOps.prod).toHaveBeenCalledWith(input1[0], [2]); }); it('should match op def', () => { node.op = 'Prod'; @@ -121,10 +130,9 @@ describe('basic math', () => { it('should call tfOps.rsqrt', () => { const input1 = [tfOps.scalar(1)]; node.op = 'Rsqrt'; - spyOn(tfOps, 'rsqrt').and.returnValue(input1); - executeOp(node, {input1}, context); + executeOp(node, {input1}, context, spyOpsAsTfOps); - expect(tfOps.rsqrt).toHaveBeenCalledWith(input1[0]); + expect(spyOps.rsqrt).toHaveBeenCalledWith(input1[0]); }); it('should match op def', () => { node.op = 'Rsqrt'; @@ -134,13 +142,12 @@ describe('basic math', () => { }); describe('LeakyRelu', () => { it('should call tfOps.leakyRelu', () => { - spyOn(tfOps, 'leakyRelu'); node.op = 'LeakyRelu'; node.attrParams['alpha'] = createNumberAttr(1); node.inputNames = ['input1']; - executeOp(node, {input1}, context); + executeOp(node, {input1}, context, spyOpsAsTfOps); - expect(tfOps.leakyRelu).toHaveBeenCalledWith(input1[0], 1); + expect(spyOps.leakyRelu).toHaveBeenCalledWith(input1[0], 1); }); it('should match op def', () => { node.op = 'LeakyRelu'; @@ -150,15 +157,14 @@ describe('basic math', () => { }); describe('Prelu', () => { it('should call tfOps.Prelu', () => { - spyOn(tfOps, 'prelu'); node.op = 'Prelu'; node.inputParams['x'] = createTensorAttr(0); node.inputParams['alpha'] = createTensorAttr(1); node.inputNames = ['input1', 'input2']; const input2 = [tfOps.scalar(1)]; - executeOp(node, 
{input1, input2}, context); + executeOp(node, {input1, input2}, context, spyOpsAsTfOps); - expect(tfOps.prelu).toHaveBeenCalledWith(input1[0], input2[0]); + expect(spyOps.prelu).toHaveBeenCalledWith(input1[0], input2[0]); }); it('should match op def', () => { node.op = 'Prelu'; @@ -169,14 +175,13 @@ describe('basic math', () => { }); describe('Atan2', () => { it('should call tfOps.atan2', () => { - spyOn(tfOps, 'atan2'); node.op = 'Atan2'; node.inputParams['y'] = createTensorAttr(1); node.inputNames = ['input1', 'input2']; const input2 = [tfOps.scalar(2)]; - executeOp(node, {input1, input2}, context); + executeOp(node, {input1, input2}, context, spyOpsAsTfOps); - expect(tfOps.atan2).toHaveBeenCalledWith(input1[0], input2[0]); + expect(spyOps.atan2).toHaveBeenCalledWith(input1[0], input2[0]); }); it('should match op def', () => { node.op = 'Atan2'; @@ -187,12 +192,11 @@ describe('basic math', () => { }); describe('ComplexAbs', () => { it('should call tfOps.abs', () => { - spyOn(tfOps, 'abs'); node.op = 'ComplexAbs'; node.inputNames = ['input1']; - executeOp(node, {input1}, context); + executeOp(node, {input1}, context, spyOpsAsTfOps); - expect(tfOps.abs).toHaveBeenCalledWith(input1[0]); + expect(spyOps.abs).toHaveBeenCalledWith(input1[0]); }); it('should match op def', () => { node.op = 'ComplexAbs'; @@ -202,7 +206,6 @@ describe('basic math', () => { }); describe('Complex', () => { it('should call tfOps.complex', () => { - spyOn(tfOps, 'complex'); node.op = 'Complex'; node.inputParams = { real: createTensorAttr(0), @@ -210,9 +213,9 @@ describe('basic math', () => { }; const input2 = [tfOps.scalar(2)]; node.inputNames = ['input1', 'input2']; - executeOp(node, {input1, input2}, context); + executeOp(node, {input1, input2}, context, spyOpsAsTfOps); - expect(tfOps.complex).toHaveBeenCalledWith(input1[0], input2[0]); + expect(spyOps.complex).toHaveBeenCalledWith(input1[0], input2[0]); }); it('should match op def', () => { node.op = 'Complex'; @@ -226,12 +229,11 @@ describe('basic math', () => { }); describe('IsNan', () => { it('should call tfOps.isNaN', () => { - spyOn(tfOps, 'isNaN'); node.op = 'IsNan'; - executeOp(node, {input1}, context); + executeOp(node, {input1}, context, spyOpsAsTfOps); - expect(tfOps.isNaN).toHaveBeenCalledWith(input1[0]); + expect(spyOps.isNaN).toHaveBeenCalledWith(input1[0]); }); it('should match op def', () => { node.op = 'IsNan'; diff --git a/tfjs-converter/src/operations/executors/convolution_executor.ts b/tfjs-converter/src/operations/executors/convolution_executor.ts index 2d33ff14036..18658dc0636 100644 --- a/tfjs-converter/src/operations/executors/convolution_executor.ts +++ b/tfjs-converter/src/operations/executors/convolution_executor.ts @@ -83,7 +83,7 @@ function fusedConvAndDepthWiseParams( export const executeOp: InternalOpExecutor = (node: Node, tensorMap: NamedTensorsMap, - context: ExecutionContext): Tensor[] => { + context: ExecutionContext, ops = tfOps): Tensor[] => { switch (node.op) { case 'Conv1D': { const stride = @@ -94,7 +94,7 @@ export const executeOp: InternalOpExecutor = .toUpperCase(); const dilation = getParamValue('dilation', node, tensorMap, context) as number; - return [tfOps.conv1d( + return [ops.conv1d( getParamValue('x', node, tensorMap, context) as Tensor3D, getParamValue('filter', node, tensorMap, context) as Tensor3D, stride, pad as 'valid' | 'same', dataFormat as 'NWC' | 'NCW', @@ -109,7 +109,7 @@ export const executeOp: InternalOpExecutor = .toUpperCase(); const dilations = getParamValue('dilations', node, tensorMap, context) as 
number[]; - return [tfOps.conv2d( + return [ops.conv2d( getParamValue('x', node, tensorMap, context) as Tensor3D | Tensor4D, getParamValue('filter', node, tensorMap, context) as Tensor4D, @@ -128,7 +128,7 @@ export const executeOp: InternalOpExecutor = leakyreluAlpha } = fusedConvAndDepthWiseParams(node, tensorMap, context); - return [tfOps.fused.conv2d({ + return [ops.fused.conv2d({ x: getParamValue('x', node, tensorMap, context) as Tensor3D | Tensor4D, filter: getParamValue('filter', node, tensorMap, context) as @@ -156,7 +156,7 @@ export const executeOp: InternalOpExecutor = leakyreluAlpha, } = fusedConvAndDepthWiseParams(node, tensorMap, context); - return [tfOps.fused.depthwiseConv2d({ + return [ops.fused.depthwiseConv2d({ x: getParamValue('x', node, tensorMap, context) as Tensor3D | Tensor4D, filter: getParamValue('filter', node, tensorMap, context) as @@ -180,7 +180,7 @@ export const executeOp: InternalOpExecutor = const stride = getParamValue('strides', node, tensorMap, context) as number[]; const pad = getPadding(node, tensorMap, context); - return [tfOps.conv2dTranspose( + return [ops.conv2dTranspose( getParamValue('x', node, tensorMap, context) as Tensor3D | Tensor4D, getParamValue('filter', node, tensorMap, context) as Tensor4D, @@ -197,7 +197,7 @@ export const executeOp: InternalOpExecutor = (getParamValue('dataFormat', node, tensorMap, context) as string) .toUpperCase(); - return [tfOps.depthwiseConv2d( + return [ops.depthwiseConv2d( getParamValue('input', node, tensorMap, context) as Tensor3D | Tensor4D, getParamValue('filter', node, tensorMap, context) as Tensor4D, @@ -213,7 +213,7 @@ export const executeOp: InternalOpExecutor = .toUpperCase(); const dilations = getParamValue('dilations', node, tensorMap, context) as number[]; - return [tfOps.conv3d( + return [ops.conv3d( getParamValue('x', node, tensorMap, context) as Tensor4D | Tensor, getParamValue('filter', node, tensorMap, context) as @@ -229,7 +229,7 @@ export const executeOp: InternalOpExecutor = const kernelSize = getParamValue('kernelSize', node, tensorMap, context) as number[]; - return [tfOps.avgPool( + return [ops.avgPool( getParamValue('x', node, tensorMap, context) as Tensor3D | Tensor4D, [kernelSize[1], kernelSize[2]], [stride[1], stride[2]], @@ -242,7 +242,7 @@ export const executeOp: InternalOpExecutor = const kernelSize = getParamValue('kernelSize', node, tensorMap, context) as number[]; - return [tfOps.maxPool( + return [ops.maxPool( getParamValue('x', node, tensorMap, context) as Tensor3D | Tensor4D, [kernelSize[1], kernelSize[2]], [stride[1], stride[2]], @@ -257,7 +257,7 @@ export const executeOp: InternalOpExecutor = const includeBatchInIndex = getParamValue('includeBatchInIndex', node, tensorMap, context) as boolean; - const {result, indexes} = tfOps.maxPoolWithArgmax( + const {result, indexes} = ops.maxPoolWithArgmax( getParamValue('x', node, tensorMap, context) as Tensor4D, [kernelSize[1], kernelSize[2]], [stride[1], stride[2]], pad as 'valid' | 'same', includeBatchInIndex); @@ -270,7 +270,7 @@ export const executeOp: InternalOpExecutor = const kernelSize = getParamValue('kernelSize', node, tensorMap, context) as number[]; - return [tfOps.avgPool3d( + return [ops.avgPool3d( getParamValue('x', node, tensorMap, context) as Tensor5D, [kernelSize[1], kernelSize[2], kernelSize[3]], [stride[1], stride[2], stride[3]], pad as 'valid' | 'same')]; @@ -283,7 +283,7 @@ export const executeOp: InternalOpExecutor = const kernelSize = getParamValue('kernelSize', node, tensorMap, context) as number[]; - return 
[tfOps.maxPool3d( + return [ops.maxPool3d( getParamValue('x', node, tensorMap, context) as Tensor5D, [kernelSize[1], kernelSize[2], kernelSize[3]], [stride[1], stride[2], stride[3]], pad as 'valid' | 'same')]; @@ -304,7 +304,7 @@ export const executeOp: InternalOpExecutor = const dilationHeight = dilations[1]; const dilationWidth = dilations[2]; - return [tfOps.dilation2d( + return [ops.dilation2d( getParamValue('x', node, tensorMap, context) as Tensor3D | Tensor4D, getParamValue('filter', node, tensorMap, context) as Tensor3D, diff --git a/tfjs-converter/src/operations/executors/convolution_executor_test.ts b/tfjs-converter/src/operations/executors/convolution_executor_test.ts index 65553cd4542..ef3c3c8e3e6 100644 --- a/tfjs-converter/src/operations/executors/convolution_executor_test.ts +++ b/tfjs-converter/src/operations/executors/convolution_executor_test.ts @@ -21,6 +21,7 @@ import {ExecutionContext} from '../../executor/execution_context'; import {Node} from '../types'; import {executeOp} from './convolution_executor'; +import {RecursiveSpy} from './spy_ops'; import {createNumberAttr, createNumericArrayAttr, createStrArrayAttr, createStrAttr, createTensorAttr, createTensorsAttr} from './test_helper'; import {createBoolAttr} from './test_helper'; @@ -29,6 +30,9 @@ describe('convolution', () => { const input = [tfOps.scalar(1)]; const context = new ExecutionContext({}, {}, {}); + let spyOps: RecursiveSpy; + let spyOpsAsTfOps: typeof tfOps; + beforeEach(() => { node = { name: 'test', @@ -40,41 +44,53 @@ describe('convolution', () => { attrParams: {}, children: [] }; + spyOps = + Object.fromEntries(Object.keys(tfOps).map((op: keyof typeof tfOps) => { + if (op === 'fused') { + return [ + op, { + conv2d: jasmine.createSpy(op), + depthwiseConv2d: jasmine.createSpy(op), + matMul: jasmine.createSpy(op), + } + ]; + } + const spy = jasmine.createSpy(op); + return [op, spy] as const ; + })) as unknown as typeof spyOps; + spyOpsAsTfOps = spyOps as unknown as typeof tfOps; }); describe('executeOp', () => { describe('AvgPool', () => { it('should call tfOps.avgPool', () => { - spyOn(tfOps, 'avgPool'); node.op = 'AvgPool'; node.attrParams['strides'] = createNumericArrayAttr([1, 2, 2, 1]); node.attrParams['pad'] = createStrAttr('same'); node.attrParams['kernelSize'] = createNumericArrayAttr([1, 2, 2, 1]); - executeOp(node, {input}, context); + executeOp(node, {input}, context, spyOpsAsTfOps); - expect(tfOps.avgPool) + expect(spyOps.avgPool) .toHaveBeenCalledWith(input[0], [2, 2], [2, 2], 'same'); }); }); describe('maxPool', () => { it('should call tfOps.maxPool', () => { - spyOn(tfOps, 'maxPool'); node.op = 'MaxPool'; node.attrParams['strides'] = createNumericArrayAttr([1, 2, 2, 1]); node.attrParams['pad'] = createStrAttr('same'); node.attrParams['kernelSize'] = createNumericArrayAttr([1, 2, 2, 1]); - executeOp(node, {input}, context); + executeOp(node, {input}, context, spyOpsAsTfOps); - expect(tfOps.maxPool) + expect(spyOps.maxPool) .toHaveBeenCalledWith(input[0], [2, 2], [2, 2], 'same'); }); }); describe('Conv2d', () => { it('should call tfOps.conv2d', () => { - spyOn(tfOps, 'conv2d'); node.op = 'Conv2D'; node.inputParams['filter'] = createTensorAttr(1); node.attrParams['strides'] = createNumericArrayAttr([1, 2, 2, 1]); @@ -86,14 +102,13 @@ describe('convolution', () => { const input2 = [tfOps.scalar(1.0)]; node.inputNames = ['input1', 'input2']; - executeOp(node, {input1, input2}, context); + executeOp(node, {input1, input2}, context, spyOpsAsTfOps); - expect(tfOps.conv2d) + expect(spyOps.conv2d) 
.toHaveBeenCalledWith( input1[0], input2[0], [2, 2], 'same', 'NHWC', [2, 2]); }); it('should support explicit padding', () => { - spyOn(tfOps, 'conv2d'); node.op = 'Conv2D'; node.inputParams['filter'] = createTensorAttr(1); node.attrParams['strides'] = createNumericArrayAttr([1, 2, 2, 1]); @@ -107,9 +122,9 @@ describe('convolution', () => { const input2 = [tfOps.scalar(1.0)]; node.inputNames = ['input1', 'input2']; - executeOp(node, {input1, input2}, context); + executeOp(node, {input1, input2}, context, spyOpsAsTfOps); - expect(tfOps.conv2d) + expect(spyOps.conv2d) .toHaveBeenCalledWith( input1[0], input2[0], [2, 2], [[0, 0], [1, 1], [2, 2], [0, 0]], 'NHWC', [2, 2]); @@ -117,7 +132,6 @@ describe('convolution', () => { }); describe('Conv2DBackpropInput', () => { it('should call tfOps.conv2dTranspose', () => { - spyOn(tfOps, 'conv2dTranspose'); node.op = 'Conv2DBackpropInput'; node.attrParams['outputShape'] = createNumericArrayAttr([1, 2, 2, 2]); node.inputParams['filter'] = createTensorAttr(1); @@ -128,14 +142,13 @@ describe('convolution', () => { const input2 = [tfOps.scalar(1.0)]; node.inputNames = ['input1', 'input2']; - executeOp(node, {input1, input2}, context); + executeOp(node, {input1, input2}, context, spyOpsAsTfOps); - expect(tfOps.conv2dTranspose) + expect(spyOps.conv2dTranspose) .toHaveBeenCalledWith( input1[0], input2[0], [1, 2, 2, 2], [2, 2], 'same'); }); it('should support explicit padding', () => { - spyOn(tfOps, 'conv2dTranspose'); node.op = 'Conv2DBackpropInput'; node.attrParams['outputShape'] = createNumericArrayAttr([1, 2, 2, 2]); node.inputParams['filter'] = createTensorAttr(1); @@ -148,9 +161,9 @@ describe('convolution', () => { const input2 = [tfOps.scalar(1.0)]; node.inputNames = ['input1', 'input2']; - executeOp(node, {input1, input2}, context); + executeOp(node, {input1, input2}, context, spyOpsAsTfOps); - expect(tfOps.conv2dTranspose) + expect(spyOps.conv2dTranspose) .toHaveBeenCalledWith( input1[0], input2[0], @@ -162,7 +175,6 @@ describe('convolution', () => { }); describe('Conv1D', () => { it('should call tfOps.conv1d', () => { - spyOn(tfOps, 'conv1d'); node.op = 'Conv1D'; node.category = 'convolution'; node.inputParams['filter'] = createTensorAttr(1); @@ -175,16 +187,15 @@ describe('convolution', () => { const input2 = [tfOps.scalar(1.0)]; node.inputNames = ['input1', 'input2']; - executeOp(node, {input1, input2}, context); + executeOp(node, {input1, input2}, context, spyOpsAsTfOps); - expect(tfOps.conv1d) + expect(spyOps.conv1d) .toHaveBeenCalledWith(input1[0], input2[0], 1, 'same', 'NWC', 1); }); }); describe('DepthwiseConv2d', () => { it('should call tfOps.depthwiseConv2d', () => { - spyOn(tfOps, 'depthwiseConv2d'); node.op = 'DepthwiseConv2d'; node.category = 'convolution'; node.inputParams['input'] = createTensorAttr(0); @@ -197,14 +208,13 @@ describe('convolution', () => { const input2 = [tfOps.scalar(1.0)]; node.inputNames = ['input1', 'input2']; - executeOp(node, {input1, input2}, context); + executeOp(node, {input1, input2}, context, spyOpsAsTfOps); - expect(tfOps.depthwiseConv2d) + expect(spyOps.depthwiseConv2d) .toHaveBeenCalledWith( input1[0], input2[0], [2, 2], 'same', 'NHWC', [2, 2]); }); it('support explicit padding', () => { - spyOn(tfOps, 'depthwiseConv2d'); node.op = 'DepthwiseConv2d'; node.category = 'convolution'; node.inputParams['input'] = createTensorAttr(0); @@ -219,9 +229,9 @@ describe('convolution', () => { const input2 = [tfOps.scalar(1.0)]; node.inputNames = ['input1', 'input2']; - executeOp(node, {input1, input2}, context); + 
executeOp(node, {input1, input2}, context, spyOpsAsTfOps); - expect(tfOps.depthwiseConv2d) + expect(spyOps.depthwiseConv2d) .toHaveBeenCalledWith( input1[0], input2[0], [2, 2], [[0, 0], [1, 1], [2, 2], [0, 0]], 'NHWC', [2, 2]); @@ -230,7 +240,6 @@ describe('convolution', () => { describe('Conv3d', () => { it('should call tfOps.conv3d', () => { - spyOn(tfOps, 'conv3d'); node.op = 'Conv3D'; node.category = 'convolution'; node.inputParams['filter'] = createTensorAttr(1); @@ -243,9 +252,9 @@ describe('convolution', () => { const input2 = [tfOps.scalar(1.0)]; node.inputNames = ['input1', 'input2']; - executeOp(node, {input1, input2}, context); + executeOp(node, {input1, input2}, context, spyOpsAsTfOps); - expect(tfOps.conv3d) + expect(spyOps.conv3d) .toHaveBeenCalledWith( input1[0], input2[0], [2, 2, 2], 'same', 'NHWC', [2, 2, 2]); }); @@ -253,53 +262,52 @@ describe('convolution', () => { describe('AvgPool3D', () => { it('should call tfOps.avgPool3d', () => { - spyOn(tfOps, 'avgPool3d'); node.op = 'AvgPool3D'; node.attrParams['strides'] = createNumericArrayAttr([1, 2, 2, 2, 1]); node.attrParams['pad'] = createStrAttr('same'); node.attrParams['kernelSize'] = createNumericArrayAttr([1, 2, 2, 2, 1]); - executeOp(node, {input}, context); + executeOp(node, {input}, context, spyOpsAsTfOps); - expect(tfOps.avgPool3d) + expect(spyOps.avgPool3d) .toHaveBeenCalledWith(input[0], [2, 2, 2], [2, 2, 2], 'same'); }); }); describe('MaxPool3D', () => { it('should call tfOps.maxPool3d', () => { - spyOn(tfOps, 'maxPool3d'); node.op = 'MaxPool3D'; node.attrParams['strides'] = createNumericArrayAttr([1, 2, 2, 2, 1]); node.attrParams['pad'] = createStrAttr('same'); node.attrParams['kernelSize'] = createNumericArrayAttr([1, 2, 2, 2, 1]); - executeOp(node, {input}, context); + executeOp(node, {input}, context, spyOpsAsTfOps); - expect(tfOps.maxPool3d) + expect(spyOps.maxPool3d) .toHaveBeenCalledWith(input[0], [2, 2, 2], [2, 2, 2], 'same'); }); }); describe('MaxPoolWithArgmax', () => { it('should call tfOps.maxPoolWithArgmax', () => { - spyOn(tfOps, 'maxPoolWithArgmax').and.returnValue({}); node.op = 'MaxPoolWithArgmax'; node.attrParams['strides'] = createNumericArrayAttr([1, 2, 2, 1]); node.attrParams['pad'] = createStrAttr('same'); node.attrParams['kernelSize'] = createNumericArrayAttr([1, 2, 2, 1]); node.attrParams['dataFormat'] = createStrAttr('NDHWC'); node.attrParams['includeBatchInIndex'] = createBoolAttr(true); - executeOp(node, {input}, context); + spyOps.maxPoolWithArgmax.and.returnValue( + {result: 'fake', indexes: 'fake'}); + + executeOp(node, {input}, context, spyOpsAsTfOps); - expect(tfOps.maxPoolWithArgmax) + expect(spyOps.maxPoolWithArgmax) .toHaveBeenCalledWith(input[0], [2, 2], [2, 2], 'same', true); }); }); describe('_FusedConv2d', () => { it('with bias and activation func', () => { - spyOn(tfOps.fused, 'conv2d'); node.op = '_FusedConv2D'; node.inputParams['filter'] = createTensorAttr(1); node.inputParams['args'] = createTensorsAttr(2, 0); @@ -314,9 +322,9 @@ describe('convolution', () => { const input3 = [tfOps.scalar(3.0)]; node.inputNames = ['input1', 'input2', 'input3']; - executeOp(node, {input1, input2, input3}, context); + executeOp(node, {input1, input2, input3}, context, spyOpsAsTfOps); - expect(tfOps.fused.conv2d).toHaveBeenCalledWith({ + expect(spyOps.fused.conv2d).toHaveBeenCalledWith({ x: input1[0], filter: input2[0], strides: [2, 2], @@ -330,7 +338,6 @@ describe('convolution', () => { }); }); it('should support explicit padding', () => { - spyOn(tfOps.fused, 'conv2d'); node.op = 
'_FusedConv2D'; node.inputParams['filter'] = createTensorAttr(1); node.inputParams['args'] = createTensorsAttr(2, 0); @@ -347,9 +354,9 @@ describe('convolution', () => { const input3 = [tfOps.scalar(3.0)]; node.inputNames = ['input1', 'input2', 'input3']; - executeOp(node, {input1, input2, input3}, context); + executeOp(node, {input1, input2, input3}, context, spyOpsAsTfOps); - expect(tfOps.fused.conv2d).toHaveBeenCalledWith({ + expect(spyOps.fused.conv2d).toHaveBeenCalledWith({ x: input1[0], filter: input2[0], strides: [2, 2], @@ -363,7 +370,6 @@ describe('convolution', () => { }); }); it('with bias and prelu activation func', () => { - spyOn(tfOps.fused, 'conv2d'); node.op = '_FusedConv2D'; node.inputParams['filter'] = createTensorAttr(1); node.inputParams['args'] = createTensorsAttr(2, 0); @@ -378,9 +384,10 @@ describe('convolution', () => { const input3 = [tfOps.scalar(3.0)]; const input4 = [tfOps.scalar(4.0)]; node.inputNames = ['input1', 'input2', 'input3', 'input4']; - executeOp(node, {input1, input2, input3, input4}, context); + executeOp( + node, {input1, input2, input3, input4}, context, spyOpsAsTfOps); - expect(tfOps.fused.conv2d).toHaveBeenCalledWith({ + expect(spyOps.fused.conv2d).toHaveBeenCalledWith({ x: input1[0], filter: input2[0], strides: [2, 2], @@ -394,7 +401,6 @@ describe('convolution', () => { }); }); it('with bias and leakyrelu activation func', () => { - spyOn(tfOps.fused, 'conv2d'); node.op = '_FusedConv2D'; node.inputParams['filter'] = createTensorAttr(1); node.inputParams['args'] = createTensorsAttr(2, 0); @@ -410,9 +416,9 @@ describe('convolution', () => { const input2 = [tfOps.scalar(2.0)]; const input3 = [tfOps.scalar(3.0)]; node.inputNames = ['input1', 'input2', 'input3']; - executeOp(node, {input1, input2, input3}, context); + executeOp(node, {input1, input2, input3}, context, spyOpsAsTfOps); - expect(tfOps.fused.conv2d).toHaveBeenCalledWith({ + expect(spyOps.fused.conv2d).toHaveBeenCalledWith({ x: input1[0], filter: input2[0], strides: [2, 2], @@ -427,7 +433,6 @@ describe('convolution', () => { }); it('bias add', () => { - spyOn(tfOps.fused, 'conv2d'); node.op = '_FusedConv2D'; node.inputParams['filter'] = createTensorAttr(1); node.inputParams['args'] = createTensorsAttr(2, 0); @@ -442,9 +447,9 @@ describe('convolution', () => { const input3 = [tfOps.scalar(3.0)]; node.inputNames = ['input1', 'input2', 'input3']; - executeOp(node, {input1, input2, input3}, context); + executeOp(node, {input1, input2, input3}, context, spyOpsAsTfOps); - expect(tfOps.fused.conv2d).toHaveBeenCalledWith({ + expect(spyOps.fused.conv2d).toHaveBeenCalledWith({ x: input1[0], filter: input2[0], strides: [2, 2], @@ -458,7 +463,6 @@ describe('convolution', () => { }); }); it('fail with batchnorm', () => { - spyOn(tfOps.fused, 'conv2d'); node.op = '_FusedConv2D'; node.inputParams['filter'] = createTensorAttr(1); node.inputParams['args'] = createTensorsAttr(2, 0); @@ -473,14 +477,15 @@ describe('convolution', () => { const input3 = [tfOps.scalar(3.0)]; node.inputNames = ['input1', 'input2', 'input3']; - expect(() => executeOp(node, {input1, input2, input3}, context)) + expect( + () => executeOp( + node, {input1, input2, input3}, context, spyOpsAsTfOps)) .toThrow(); }); }); }); describe('FusedDepthwiseConv2d', () => { it('support explicit padding', () => { - spyOn(tfOps.fused, 'depthwiseConv2d'); node.op = 'FusedDepthwiseConv2dNative'; node.inputParams['filter'] = createTensorAttr(1); node.inputParams['args'] = createTensorsAttr(2, 0); @@ -497,9 +502,9 @@ describe('convolution', () => { 
const input3 = [tfOps.scalar(3.0)]; node.inputNames = ['input1', 'input2', 'input3']; - executeOp(node, {input1, input2, input3}, context); + executeOp(node, {input1, input2, input3}, context, spyOpsAsTfOps); - expect(tfOps.fused.depthwiseConv2d).toHaveBeenCalledWith({ + expect(spyOps.fused.depthwiseConv2d).toHaveBeenCalledWith({ x: input1[0], filter: input2[0], strides: [2, 2], @@ -513,7 +518,6 @@ describe('convolution', () => { }); }); it('with only activation func', () => { - spyOn(tfOps.fused, 'depthwiseConv2d'); node.op = 'FusedDepthwiseConv2dNative'; node.inputParams['filter'] = createTensorAttr(1); node.inputParams['args'] = createTensorsAttr(2, 0); @@ -527,9 +531,9 @@ describe('convolution', () => { const input2 = [tfOps.scalar(2.0)]; const input3 = [tfOps.scalar(3.0)]; node.inputNames = ['input1', 'input2', 'input3']; - executeOp(node, {input1, input2, input3}, context); + executeOp(node, {input1, input2, input3}, context, spyOpsAsTfOps); - expect(tfOps.fused.depthwiseConv2d).toHaveBeenCalledWith({ + expect(spyOps.fused.depthwiseConv2d).toHaveBeenCalledWith({ x: input1[0], filter: input2[0], strides: [2, 2], @@ -543,7 +547,6 @@ describe('convolution', () => { }); }); it('with bias and activation func', () => { - spyOn(tfOps.fused, 'depthwiseConv2d'); node.op = 'FusedDepthwiseConv2dNative'; node.inputParams['filter'] = createTensorAttr(1); node.inputParams['args'] = createTensorsAttr(2, 0); @@ -558,9 +561,9 @@ describe('convolution', () => { const input3 = [tfOps.scalar(3.0)]; node.inputNames = ['input1', 'input2', 'input3']; - executeOp(node, {input1, input2, input3}, context); + executeOp(node, {input1, input2, input3}, context, spyOpsAsTfOps); - expect(tfOps.fused.depthwiseConv2d).toHaveBeenCalledWith({ + expect(spyOps.fused.depthwiseConv2d).toHaveBeenCalledWith({ x: input1[0], filter: input2[0], strides: [2, 2], @@ -574,7 +577,6 @@ describe('convolution', () => { }); }); it('with bias and prelu activation func', () => { - spyOn(tfOps.fused, 'depthwiseConv2d'); node.op = 'FusedDepthwiseConv2dNative'; node.inputParams['filter'] = createTensorAttr(1); node.inputParams['args'] = createTensorsAttr(2, 0); @@ -589,9 +591,9 @@ describe('convolution', () => { const input3 = [tfOps.scalar(3.0)]; const input4 = [tfOps.scalar(4.0)]; node.inputNames = ['input1', 'input2', 'input3', 'input4']; - executeOp(node, {input1, input2, input3, input4}, context); + executeOp(node, {input1, input2, input3, input4}, context, spyOpsAsTfOps); - expect(tfOps.fused.depthwiseConv2d).toHaveBeenCalledWith({ + expect(spyOps.fused.depthwiseConv2d).toHaveBeenCalledWith({ x: input1[0], filter: input2[0], strides: [2, 2], @@ -605,7 +607,6 @@ describe('convolution', () => { }); }); it('with bias and leakyrelu activation func', () => { - spyOn(tfOps.fused, 'depthwiseConv2d'); node.op = 'FusedDepthwiseConv2dNative'; node.inputParams['filter'] = createTensorAttr(1); node.inputParams['args'] = createTensorsAttr(2, 0); @@ -621,9 +622,9 @@ describe('convolution', () => { const input2 = [tfOps.scalar(2.0)]; const input3 = [tfOps.scalar(3.0)]; node.inputNames = ['input1', 'input2', 'input3']; - executeOp(node, {input1, input2, input3}, context); + executeOp(node, {input1, input2, input3}, context, spyOpsAsTfOps); - expect(tfOps.fused.depthwiseConv2d).toHaveBeenCalledWith({ + expect(spyOps.fused.depthwiseConv2d).toHaveBeenCalledWith({ x: input1[0], filter: input2[0], strides: [2, 2], @@ -638,7 +639,6 @@ describe('convolution', () => { }); it('bias add', () => { - spyOn(tfOps.fused, 'depthwiseConv2d'); node.op = 
'FusedDepthwiseConv2dNative'; node.inputParams['filter'] = createTensorAttr(1); node.inputParams['args'] = createTensorsAttr(2, 0); @@ -653,9 +653,9 @@ describe('convolution', () => { const input3 = [tfOps.scalar(3.0)]; node.inputNames = ['input1', 'input2', 'input3']; - executeOp(node, {input1, input2, input3}, context); + executeOp(node, {input1, input2, input3}, context, spyOpsAsTfOps); - expect(tfOps.fused.depthwiseConv2d).toHaveBeenCalledWith({ + expect(spyOps.fused.depthwiseConv2d).toHaveBeenCalledWith({ x: input1[0], filter: input2[0], strides: [2, 2], @@ -672,7 +672,6 @@ describe('convolution', () => { describe('dilation2d', () => { it('should call tfOps.dilation2d', () => { - spyOn(tfOps, 'dilation2d'); node.op = 'Dilation2D'; node.inputParams['filter'] = createTensorAttr(1); node.attrParams['strides'] = createNumericArrayAttr([1, 1, 1, 1]); @@ -683,9 +682,9 @@ describe('convolution', () => { const input2 = [tfOps.scalar(1.0)]; node.inputNames = ['input1', 'input2']; - executeOp(node, {input1, input2}, context); + executeOp(node, {input1, input2}, context, spyOpsAsTfOps); - expect(tfOps.dilation2d) + expect(spyOps.dilation2d) .toHaveBeenCalledWith( input1[0], input2[0], [1, 1], 'same', [2, 2], 'NHWC'); }); diff --git a/tfjs-converter/src/operations/executors/creation_executor.ts b/tfjs-converter/src/operations/executors/creation_executor.ts index be6e9ed2428..d235b060214 100644 --- a/tfjs-converter/src/operations/executors/creation_executor.ts +++ b/tfjs-converter/src/operations/executors/creation_executor.ts @@ -27,7 +27,7 @@ import {getParamValue} from './utils'; export const executeOp: InternalOpExecutor = (node: Node, tensorMap: NamedTensorsMap, - context: ExecutionContext): Tensor[] => { + context: ExecutionContext, ops = tfOps): Tensor[] => { switch (node.op) { case 'Fill': { const shape = @@ -36,7 +36,7 @@ export const executeOp: InternalOpExecutor = getParamValue('dtype', node, tensorMap, context) as DataType; const value = getParamValue('value', node, tensorMap, context) as number; - return [tfOps.fill(shape, value, dtype)]; + return [ops.fill(shape, value, dtype)]; } case 'LinSpace': { const start = @@ -44,7 +44,7 @@ export const executeOp: InternalOpExecutor = const stop = getParamValue('stop', node, tensorMap, context) as number; const num = getParamValue('num', node, tensorMap, context) as number; - return [tfOps.linspace(start, stop, num)]; + return [ops.linspace(start, stop, num)]; } case 'Multinomial': { const logits = @@ -53,7 +53,7 @@ export const executeOp: InternalOpExecutor = getParamValue('numSamples', node, tensorMap, context) as number; const seed = getParamValue('seed', node, tensorMap, context) as number; - return [tfOps.multinomial(logits, numSamples, seed)]; + return [ops.multinomial(logits, numSamples, seed)]; } case 'OneHot': { const indices = @@ -64,19 +64,19 @@ export const executeOp: InternalOpExecutor = getParamValue('onValue', node, tensorMap, context) as number; const offValue = getParamValue('offValue', node, tensorMap, context) as number; - return [tfOps.oneHot(indices, depth, onValue, offValue)]; + return [ops.oneHot(indices, depth, onValue, offValue)]; } case 'Ones': { - return [tfOps.ones( + return [ops.ones( getParamValue('shape', node, tensorMap, context) as number[], getParamValue('dtype', node, tensorMap, context) as DataType)]; } case 'OnesLike': { - return [tfOps.onesLike( + return [ops.onesLike( getParamValue('x', node, tensorMap, context) as Tensor)]; } case 'RandomUniform': { - return [tfOps.randomUniform( + return 
[ops.randomUniform( // tslint:disable-next-line:no-any getParamValue('shape', node, tensorMap, context) as any, getParamValue('minval', node, tensorMap, context) as number, @@ -90,7 +90,7 @@ export const executeOp: InternalOpExecutor = getParamValue('stop', node, tensorMap, context) as number; const step = getParamValue('step', node, tensorMap, context) as number; - return [tfOps.range( + return [ops.range( start, stop, step, getParamValue('dtype', node, tensorMap, context) as 'float32' | 'int32')]; @@ -104,19 +104,19 @@ export const executeOp: InternalOpExecutor = getParamValue('stdDev', node, tensorMap, context) as number; const seed = getParamValue('seed', node, tensorMap, context) as number; - return [tfOps.truncatedNormal( + return [ops.truncatedNormal( shape, mean, stdDev, getParamValue('dtype', node, tensorMap, context) as 'float32' | 'int32', seed)]; } case 'Zeros': { - return [tfOps.zeros( + return [ops.zeros( getParamValue('shape', node, tensorMap, context) as number[], getParamValue('dtype', node, tensorMap, context) as DataType)]; } case 'ZerosLike': { - return [tfOps.zerosLike( + return [ops.zerosLike( getParamValue('x', node, tensorMap, context) as Tensor)]; } default: diff --git a/tfjs-converter/src/operations/executors/creation_executor_test.ts b/tfjs-converter/src/operations/executors/creation_executor_test.ts index cfe30817011..39afa57631b 100644 --- a/tfjs-converter/src/operations/executors/creation_executor_test.ts +++ b/tfjs-converter/src/operations/executors/creation_executor_test.ts @@ -23,14 +23,20 @@ import {Node} from '../types'; import {executeOp} from './creation_executor'; import {createDtypeAttr, createNumberAttr, createNumberAttrFromIndex, createNumericArrayAttrFromIndex, createTensorAttr, validateParam} from './test_helper'; +import {spyOnAllFunctions, RecursiveSpy} from './spy_ops'; describe('creation', () => { let node: Node; const input1 = [tfOps.tensor1d([1, 2, 3])]; const input2 = [tfOps.scalar(1)]; const context = new ExecutionContext({}, {}, {}); + let spyOps: RecursiveSpy; + let spyOpsAsTfOps: typeof tfOps; beforeEach(() => { + spyOps = spyOnAllFunctions(tfOps); + spyOpsAsTfOps = spyOps as unknown as typeof tfOps; + node = { name: 'test', op: '', @@ -46,15 +52,14 @@ describe('creation', () => { describe('executeOp', () => { describe('Fill', () => { it('should call tfOps.fill', () => { - spyOn(tfOps, 'fill'); node.op = 'Fill'; node.inputParams['shape'] = createNumericArrayAttrFromIndex(0); node.inputParams['value'] = createNumberAttrFromIndex(1); node.attrParams['dtype'] = createDtypeAttr('int32'); - executeOp(node, {input1, input2}, context); + executeOp(node, {input1, input2}, context, spyOpsAsTfOps); - expect(tfOps.fill).toHaveBeenCalledWith([1, 2, 3], 1, 'int32'); + expect(spyOps.fill).toHaveBeenCalledWith([1, 2, 3], 1, 'int32'); }); it('should match json def', () => { node.op = 'Fill'; @@ -67,7 +72,6 @@ describe('creation', () => { }); describe('LinSpace', () => { it('should call tfOps.linspace', () => { - spyOn(tfOps, 'linspace'); node.op = 'LinSpace'; node.inputParams['start'] = createNumberAttrFromIndex(0); node.inputParams['stop'] = createNumberAttrFromIndex(1); @@ -75,9 +79,9 @@ describe('creation', () => { node.inputNames = ['input', 'input2', 'input3']; const input = [tfOps.scalar(0)]; const input3 = [tfOps.scalar(2)]; - executeOp(node, {input, input2, input3}, context); + executeOp(node, {input, input2, input3}, context, spyOpsAsTfOps); - expect(tfOps.linspace).toHaveBeenCalledWith(0, 1, 2); + 
expect(spyOps.linspace).toHaveBeenCalledWith(0, 1, 2); }); it('should match json def', () => { node.op = 'LinSpace'; @@ -90,7 +94,6 @@ describe('creation', () => { }); describe('OneHot', () => { it('should call tfOps.oneHot', () => { - spyOn(tfOps, 'oneHot'); node.op = 'OneHot'; node.inputParams['indices'] = createTensorAttr(0); node.inputParams['depth'] = createNumberAttrFromIndex(1); @@ -100,9 +103,11 @@ describe('creation', () => { const input = [tfOps.tensor1d([0])]; const input3 = [tfOps.scalar(2)]; const input4 = [tfOps.scalar(3)]; - executeOp(node, {input, input2, input3, input4}, context); + spyOps.oneHot.and.returnValue({}); + executeOp(node, {input, input2, input3, input4}, context, + spyOpsAsTfOps); - expect(tfOps.oneHot).toHaveBeenCalledWith(input[0], 1, 2, 3); + expect(spyOps.oneHot).toHaveBeenCalledWith(input[0], 1, 2, 3); }); it('should match json def', () => { node.op = 'OneHot'; @@ -116,13 +121,12 @@ describe('creation', () => { }); describe('Ones', () => { it('should call tfOps.ones', () => { - spyOn(tfOps, 'ones'); node.op = 'Ones'; node.inputParams['shape'] = createNumericArrayAttrFromIndex(0); node.attrParams['dtype'] = createDtypeAttr('float32'); - executeOp(node, {input1}, context); + executeOp(node, {input1}, context, spyOpsAsTfOps); - expect(tfOps.ones).toHaveBeenCalledWith([1, 2, 3], 'float32'); + expect(spyOps.ones).toHaveBeenCalledWith([1, 2, 3], 'float32'); }); it('should match json def', () => { node.op = 'Ones'; @@ -134,12 +138,11 @@ describe('creation', () => { }); describe('OnesLike', () => { it('should call tfOps.onesLike', () => { - spyOn(tfOps, 'onesLike'); node.op = 'OnesLike'; node.inputParams['x'] = createTensorAttr(0); - executeOp(node, {input1}, context); + executeOp(node, {input1}, context, spyOpsAsTfOps); - expect(tfOps.onesLike).toHaveBeenCalledWith(input1[0]); + expect(spyOps.onesLike).toHaveBeenCalledWith(input1[0]); }); it('should match json def', () => { node.op = 'OnesLike'; @@ -150,7 +153,6 @@ describe('creation', () => { }); describe('Range', () => { it('should call tfOps.range', () => { - spyOn(tfOps, 'range'); node.op = 'Range'; node.inputParams['start'] = createNumberAttrFromIndex(0); node.inputParams['stop'] = createNumberAttrFromIndex(1); @@ -159,9 +161,9 @@ describe('creation', () => { node.inputNames = ['input', 'input2', 'input3']; const input = [tfOps.scalar(0)]; const input3 = [tfOps.scalar(2)]; - executeOp(node, {input, input2, input3}, context); + executeOp(node, {input, input2, input3}, context, spyOpsAsTfOps); - expect(tfOps.range).toHaveBeenCalledWith(0, 1, 2, 'float32'); + expect(spyOps.range).toHaveBeenCalledWith(0, 1, 2, 'float32'); }); it('should match json def', () => { node.op = 'Range'; @@ -175,7 +177,6 @@ describe('creation', () => { }); describe('RandomUniform', () => { it('should call tfOps.randomUniform', () => { - spyOn(tfOps, 'randomUniform'); node.op = 'RandomUniform'; node.inputParams['shape'] = createNumericArrayAttrFromIndex(0); node.inputNames = ['input1']; @@ -184,9 +185,9 @@ describe('creation', () => { node.attrParams['dtype'] = createDtypeAttr('float32'); node.attrParams['seed'] = createNumberAttr(0); - executeOp(node, {input1}, context); + executeOp(node, {input1}, context, spyOpsAsTfOps); - expect(tfOps.randomUniform) + expect(spyOps.randomUniform) .toHaveBeenCalledWith([1, 2, 3], 0, 1, 'float32'); }); it('should match json def', () => { @@ -203,7 +204,6 @@ describe('creation', () => { }); describe('TruncatedNormal', () => { it('should call tfOps.truncatedNormal', () => { - spyOn(tfOps, 
'truncatedNormal'); node.op = 'TruncatedNormal'; node.inputParams['shape'] = createNumericArrayAttrFromIndex(0); node.inputNames = ['input1']; @@ -212,9 +212,9 @@ describe('creation', () => { node.attrParams['dtype'] = createDtypeAttr('float32'); node.attrParams['seed'] = createNumberAttr(0); - executeOp(node, {input1}, context); + executeOp(node, {input1}, context, spyOpsAsTfOps); - expect(tfOps.truncatedNormal) + expect(spyOps.truncatedNormal) .toHaveBeenCalledWith([1, 2, 3], 0, 1, 'float32', 0); }); it('should match json def', () => { @@ -231,13 +231,12 @@ describe('creation', () => { }); describe('Zeros', () => { it('should call tfOps.zeros', () => { - spyOn(tfOps, 'zeros'); node.op = 'Zeros'; node.inputParams['shape'] = createNumericArrayAttrFromIndex(0); node.attrParams['dtype'] = createDtypeAttr('float32'); - executeOp(node, {input1}, context); + executeOp(node, {input1}, context, spyOpsAsTfOps); - expect(tfOps.zeros).toHaveBeenCalledWith([1, 2, 3], 'float32'); + expect(spyOps.zeros).toHaveBeenCalledWith([1, 2, 3], 'float32'); }); it('should match json def', () => { node.op = 'Zeros'; @@ -248,12 +247,11 @@ describe('creation', () => { }); describe('ZerosLike', () => { it('should call tfOps.zerosLike', () => { - spyOn(tfOps, 'zerosLike'); node.op = 'ZerosLike'; node.inputParams['x'] = createTensorAttr(0); - executeOp(node, {input1}, context); + executeOp(node, {input1}, context, spyOpsAsTfOps); - expect(tfOps.zerosLike).toHaveBeenCalledWith(input1[0]); + expect(spyOps.zerosLike).toHaveBeenCalledWith(input1[0]); }); it('should match json def', () => { node.op = 'ZerosLike'; @@ -263,14 +261,13 @@ describe('creation', () => { }); describe('Multinomial', () => { it('should call tfOps.multinomial', () => { - spyOn(tfOps, 'multinomial'); node.op = 'Multinomial'; node.inputParams['logits'] = createTensorAttr(0); node.inputParams['numSamples'] = createNumberAttrFromIndex(1); node.attrParams['seed'] = createNumberAttr(2); - executeOp(node, {input1, input2}, context); + executeOp(node, {input1, input2}, context, spyOpsAsTfOps); - expect(tfOps.multinomial).toHaveBeenCalledWith(input1[0], 1, 2); + expect(spyOps.multinomial).toHaveBeenCalledWith(input1[0], 1, 2); }); it('should match json def', () => { node.op = 'Multinomial'; diff --git a/tfjs-converter/src/operations/executors/dynamic_executor.ts b/tfjs-converter/src/operations/executors/dynamic_executor.ts index 083071dc47e..801ec2ebf8b 100644 --- a/tfjs-converter/src/operations/executors/dynamic_executor.ts +++ b/tfjs-converter/src/operations/executors/dynamic_executor.ts @@ -21,6 +21,7 @@ import * as tfOps from '@tensorflow/tfjs-core/dist/ops/ops_for_converter'; import {NamedTensorsMap} from '../../data/types'; import {ExecutionContext} from '../../executor/execution_context'; +import { ResourceManager } from '../../executor/resource_manager'; import {InternalOpAsyncExecutor, Node} from '../types'; import {getParamValue} from './utils'; @@ -50,7 +51,8 @@ function nmsParams( export const executeOp: InternalOpAsyncExecutor = async( node: Node, tensorMap: NamedTensorsMap, - context: ExecutionContext): Promise => { + context: ExecutionContext, resourceManager: ResourceManager, + ops = tfOps): Promise => { switch (node.op) { case 'NonMaxSuppressionV5': { const { @@ -62,7 +64,7 @@ export const executeOp: InternalOpAsyncExecutor = async( softNmsSigma } = nmsParams(node, tensorMap, context); - const result = await tfOps.image.nonMaxSuppressionWithScoreAsync( + const result = await ops.image.nonMaxSuppressionWithScoreAsync( boxes as Tensor2D, scores 
as Tensor1D, maxOutputSize, iouThreshold, scoreThreshold, softNmsSigma); @@ -76,7 +78,7 @@ export const executeOp: InternalOpAsyncExecutor = async( getParamValue('padToMaxOutputSize', node, tensorMap, context) as boolean; - const result = await tfOps.image.nonMaxSuppressionPaddedAsync( + const result = await ops.image.nonMaxSuppressionPaddedAsync( boxes as Tensor2D, scores as Tensor1D, maxOutputSize, iouThreshold, scoreThreshold, padToMaxOutputSize); @@ -87,20 +89,20 @@ export const executeOp: InternalOpAsyncExecutor = async( const {boxes, scores, maxOutputSize, iouThreshold, scoreThreshold} = nmsParams(node, tensorMap, context); - return [await tfOps.image.nonMaxSuppressionAsync( + return [await ops.image.nonMaxSuppressionAsync( boxes as Tensor2D, scores as Tensor1D, maxOutputSize, iouThreshold, scoreThreshold)]; } case 'Where': { - const condition = tfOps.cast( + const condition = ops.cast( (getParamValue('condition', node, tensorMap, context) as Tensor), 'bool'); - const result = [await tfOps.whereAsync(condition)]; + const result = [await ops.whereAsync(condition)]; condition.dispose(); return result; } case 'ListDiff': { - return tfOps.setdiff1dAsync( + return ops.setdiff1dAsync( getParamValue('x', node, tensorMap, context) as Tensor, getParamValue('y', node, tensorMap, context) as Tensor); } diff --git a/tfjs-converter/src/operations/executors/dynamic_executor_test.ts b/tfjs-converter/src/operations/executors/dynamic_executor_test.ts index b0bd30d6e7d..5e052d62943 100644 --- a/tfjs-converter/src/operations/executors/dynamic_executor_test.ts +++ b/tfjs-converter/src/operations/executors/dynamic_executor_test.ts @@ -23,14 +23,19 @@ import * as dynamic from '../op_list/dynamic'; import {Node} from '../types'; import {executeOp} from './dynamic_executor'; +import {RecursiveSpy, spyOnAllFunctions} from './spy_ops'; import {createBoolAttr, createNumberAttrFromIndex, createTensorAttr, validateParam} from './test_helper'; describe('dynamic', () => { let node: Node; const input1 = [tfOps.tensor1d([1])]; const context = new ExecutionContext({}, {}, {}); + let spyOps: RecursiveSpy; + let spyOpsAsTfOps: typeof tfOps; beforeEach(() => { + spyOps = spyOnAllFunctions(tfOps); + spyOpsAsTfOps = spyOps as unknown as typeof tfOps; node = { name: 'input1', op: '', @@ -57,10 +62,12 @@ describe('dynamic', () => { const input3 = [tfOps.tensor1d([1])]; const input4 = [tfOps.tensor1d([1])]; const input5 = [tfOps.tensor1d([1])]; - spyOn(tfOps.image, 'nonMaxSuppressionAsync'); - const result = - executeOp(node, {input1, input2, input3, input4, input5}, context); - expect(tfOps.image.nonMaxSuppressionAsync) + spyOps.image.nonMaxSuppressionAsync.and.returnValue({}); + + const result = executeOp( + node, {input1, input2, input3, input4, input5}, context, undefined, + spyOpsAsTfOps); + expect(spyOps.image.nonMaxSuppressionAsync) .toHaveBeenCalledWith(input1[0], input2[0], 1, 1, 1); expect(result instanceof Promise).toBeTruthy(); }); @@ -90,10 +97,12 @@ describe('dynamic', () => { const input3 = [tfOps.tensor1d([1])]; const input4 = [tfOps.tensor1d([1])]; const input5 = [tfOps.tensor1d([1])]; - spyOn(tfOps.image, 'nonMaxSuppressionAsync'); - const result = - executeOp(node, {input1, input2, input3, input4, input5}, context); - expect(tfOps.image.nonMaxSuppressionAsync) + spyOps.image.nonMaxSuppressionAsync.and.returnValue({}); + + const result = executeOp( + node, {input1, input2, input3, input4, input5}, context, undefined, + spyOpsAsTfOps); + expect(spyOps.image.nonMaxSuppressionAsync) 
.toHaveBeenCalledWith(input1[0], input2[0], 1, 1, 1); expect(result instanceof Promise).toBeTruthy(); }); @@ -125,10 +134,13 @@ describe('dynamic', () => { const input3 = [tfOps.tensor1d([1])]; const input4 = [tfOps.tensor1d([1])]; const input5 = [tfOps.tensor1d([1])]; - spyOn(tfOps.image, 'nonMaxSuppressionPaddedAsync').and.returnValue({}); - const result = - executeOp(node, {input1, input2, input3, input4, input5}, context); - expect(tfOps.image.nonMaxSuppressionPaddedAsync) + + spyOps.image.nonMaxSuppressionPaddedAsync.and.returnValue({}); + + const result = executeOp( + node, {input1, input2, input3, input4, input5}, context, undefined, + spyOpsAsTfOps); + expect(spyOps.image.nonMaxSuppressionPaddedAsync) .toHaveBeenCalledWith(input1[0], input2[0], 1, 1, 1, true); expect(result instanceof Promise).toBeTruthy(); }); @@ -163,11 +175,11 @@ describe('dynamic', () => { const input4 = [tfOps.tensor1d([1])]; const input5 = [tfOps.tensor1d([1])]; const input6 = [tfOps.tensor1d([1])]; - spyOn(tfOps.image, 'nonMaxSuppressionWithScoreAsync') - .and.returnValue({}); + spyOps.image.nonMaxSuppressionWithScoreAsync.and.returnValue({}); const result = executeOp( - node, {input1, input2, input3, input4, input5, input6}, context); - expect(tfOps.image.nonMaxSuppressionWithScoreAsync) + node, {input1, input2, input3, input4, input5, input6}, context, + undefined, spyOpsAsTfOps); + expect(spyOps.image.nonMaxSuppressionWithScoreAsync) .toHaveBeenCalledWith(input1[0], input2[0], 1, 1, 1, 1); expect(result instanceof Promise).toBeTruthy(); }); @@ -192,16 +204,13 @@ describe('dynamic', () => { node.op = 'Where'; node.inputParams = {'condition': createTensorAttr(0)}; const input1 = [tfOps.scalar(1)]; - spyOn(tfOps, 'whereAsync'); + // spyOn(tfOps, 'whereAsync'); - const result = executeOp(node, {input1}, context); - expect( - (tfOps.whereAsync as jasmine.Spy).calls.mostRecent().args[0].dtype) + const result = + executeOp(node, {input1}, context, undefined, spyOpsAsTfOps); + expect(spyOps.whereAsync.calls.mostRecent().args[0].dtype) .toEqual('bool'); - expect((tfOps.whereAsync as jasmine.Spy) - .calls.mostRecent() - .args[0] - .arraySync()) + expect(spyOps.whereAsync.calls.mostRecent().args[0].arraySync()) .toEqual(1); expect(result instanceof Promise).toBeTruthy(); }); @@ -215,7 +224,6 @@ describe('dynamic', () => { node.op = 'Where'; node.inputParams = {'condition': createTensorAttr(0)}; const input1 = [tfOps.scalar(1)]; - spyOn(tfOps, 'whereAsync').and.callThrough(); const prevCount = memory().numTensors; await executeOp(node, {input1}, context); @@ -231,10 +239,12 @@ describe('dynamic', () => { node.inputParams = {'x': createTensorAttr(0), 'y': createTensorAttr(1)}; const input1 = [tfOps.scalar(1)]; const input2 = [tfOps.scalar(1)]; - spyOn(tfOps, 'setdiff1dAsync'); + spyOps.setdiff1dAsync.and.returnValue({}); - const result = executeOp(node, {input1, input2}, context); - expect(tfOps.setdiff1dAsync).toHaveBeenCalledWith(input1[0], input2[0]); + const result = executeOp( + node, {input1, input2}, context, undefined, spyOpsAsTfOps); + expect(spyOps.setdiff1dAsync) + .toHaveBeenCalledWith(input1[0], input2[0]); expect(result instanceof Promise).toBeTruthy(); }); it('should match json def', () => { diff --git a/tfjs-converter/src/operations/executors/evaluation_executor.ts b/tfjs-converter/src/operations/executors/evaluation_executor.ts index 5a46646155e..fd6ceba54c3 100644 --- a/tfjs-converter/src/operations/executors/evaluation_executor.ts +++ b/tfjs-converter/src/operations/executors/evaluation_executor.ts 
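Note on the testing pattern running through these executor changes: each `executeOp` now accepts an optional trailing `ops` argument that defaults to the real `tfOps` module (for the async dynamic executor it follows the `resourceManager` parameter), and the specs pass in a recursively spied copy (`spyOpsAsTfOps`) instead of patching the shared module with `spyOn(tfOps, ...)`. Most spec files build that copy with the `spyOnAllFunctions` helper imported from `./spy_ops`; `convolution_executor_test.ts` instead builds its spy object inline with a special case for the `fused` namespace. The helper's implementation is not visible in this part of the diff, so the following is only a rough sketch under two assumptions: that it wraps every function (including nested namespaces such as `fused` and `image`) in a Jasmine spy, and that those spies call through by default, since the `TopKV2`/`Unique` specs invoke `executeOp` with the spied ops without stubbing a return value while the executor immediately reads `.values`/`.indices` off the result. Jasmine typings are assumed to be available, as in these spec files.

```ts
// Hypothetical sketch of a spy_ops-style helper; the actual file added by
// this change may differ in naming and details.

// Every function property becomes a jasmine.Spy; nested objects (fused,
// image, ...) are spied recursively; everything else passes through as-is.
export type RecursiveSpy<T> = T extends Function ?
    jasmine.Spy :
    {[K in keyof T]: RecursiveSpy<T[K]>};

export function spyOnAllFunctions<T extends object>(obj: T): RecursiveSpy<T> {
  const entries = Object.entries(obj).map(([key, val]) => {
    if (typeof val === 'function') {
      // Delegating to the original keeps specs working when they use the
      // spy's result; individual specs can still override behavior with
      // `.and.returnValue(...)`.
      return [key, jasmine.createSpy(key, val).and.callThrough()] as const;
    }
    if (val !== null && typeof val === 'object') {
      // Recurse into namespaces such as tfOps.fused and tfOps.image.
      return [key, spyOnAllFunctions(val)] as const;
    }
    return [key, val] as const;
  });
  return Object.fromEntries(entries) as unknown as RecursiveSpy<T>;
}
```

The spec files above then do exactly what this sketch enables: `spyOps = spyOnAllFunctions(tfOps)` in `beforeEach`, followed by `spyOpsAsTfOps = spyOps as unknown as typeof tfOps`, which is handed to `executeOp` so assertions can run against `spyOps.<op>` without mutating the shared `tfOps` module.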
@@ -26,7 +26,8 @@ import {InternalOpExecutor, Node} from '../types'; import {getParamValue} from './utils'; export const executeOp: InternalOpExecutor = - (node: Node, tensorMap: NamedTensorsMap, context: ExecutionContext): + (node: Node, tensorMap: NamedTensorsMap, context: ExecutionContext, + ops = tfOps): Tensor[] => { switch (node.op) { case 'TopKV2': { @@ -34,19 +35,19 @@ export const executeOp: InternalOpExecutor = const k = getParamValue('k', node, tensorMap, context) as number; const sorted = getParamValue('sorted', node, tensorMap, context) as boolean; - const result = tfOps.topk(x, k, sorted); + const result = ops.topk(x, k, sorted); return [result.values, result.indices]; } case 'Unique': { const x = getParamValue('x', node, tensorMap, context) as Tensor; - const result = tfOps.unique(x); + const result = ops.unique(x); return [result.values, result.indices]; } case 'UniqueV2': { const x = getParamValue('x', node, tensorMap, context) as Tensor; const axis = getParamValue('axis', node, tensorMap, context) as number; - const result = tfOps.unique(x, axis); + const result = ops.unique(x, axis); return [result.values, result.indices]; } default: diff --git a/tfjs-converter/src/operations/executors/evaluation_executor_test.ts b/tfjs-converter/src/operations/executors/evaluation_executor_test.ts index 35edbbb01d3..2f1bf42d9f4 100644 --- a/tfjs-converter/src/operations/executors/evaluation_executor_test.ts +++ b/tfjs-converter/src/operations/executors/evaluation_executor_test.ts @@ -21,6 +21,7 @@ import {ExecutionContext} from '../../executor/execution_context'; import {Node} from '../types'; import {executeOp} from './evaluation_executor'; +import {RecursiveSpy, spyOnAllFunctions} from './spy_ops'; import {createBoolAttr, createNumberAttrFromIndex, createTensorAttr} from './test_helper'; describe('evaluation', () => { @@ -43,15 +44,22 @@ describe('evaluation', () => { }); describe('executeOp', () => { + let spyOps: RecursiveSpy; + let spyOpsAsTfOps: typeof tfOps; + + beforeEach(() => { + spyOps = spyOnAllFunctions(tfOps); + spyOpsAsTfOps = spyOps as unknown as typeof tfOps; + }); + describe('TopKV2', () => { it('should return input', () => { node.op = 'TopKV2'; node.inputParams['x'] = createTensorAttr(0); node.inputParams['k'] = createNumberAttrFromIndex(1); node.attrParams['sorted'] = createBoolAttr(true); - spyOn(tfOps, 'topk').and.callThrough(); - executeOp(node, {input1, input2}, context); - expect(tfOps.topk).toHaveBeenCalledWith(input1[0], 1, true); + executeOp(node, {input1, input2}, context, spyOpsAsTfOps); + expect(spyOps.topk).toHaveBeenCalledWith(input1[0], 1, true); }); }); @@ -59,9 +67,8 @@ describe('evaluation', () => { it('should get called correctly', () => { node.op = 'Unique'; node.inputParams['x'] = createTensorAttr(0); - spyOn(tfOps, 'unique').and.callThrough(); - executeOp(node, {input1}, context); - expect(tfOps.unique).toHaveBeenCalledWith(input1[0]); + executeOp(node, {input1}, context, spyOpsAsTfOps); + expect(spyOps.unique).toHaveBeenCalledWith(input1[0]); }); }); @@ -70,11 +77,12 @@ describe('evaluation', () => { node.op = 'UniqueV2'; node.inputParams['x'] = createTensorAttr(0); node.inputParams['axis'] = createNumberAttrFromIndex(1); - spyOn(tfOps, 'unique').and.callThrough(); const xInput = [tfOps.tensor2d([[1], [2]])]; const axisInput = [tfOps.scalar(1)]; - executeOp(node, {'input1': xInput, 'input2': axisInput}, context); - expect(tfOps.unique).toHaveBeenCalledWith(xInput[0], 1); + executeOp( + node, {'input1': xInput, 'input2': axisInput}, context, + 
spyOpsAsTfOps); + expect(spyOps.unique).toHaveBeenCalledWith(xInput[0], 1); }); }); }); diff --git a/tfjs-converter/src/operations/executors/graph_executor.ts b/tfjs-converter/src/operations/executors/graph_executor.ts index b2c53c78a5a..f6ba57e768a 100644 --- a/tfjs-converter/src/operations/executors/graph_executor.ts +++ b/tfjs-converter/src/operations/executors/graph_executor.ts @@ -27,7 +27,7 @@ import {cloneTensor, getParamValue, getTensor} from './utils'; export const executeOp: InternalOpExecutor = (node: Node, tensorMap: NamedTensorsMap, - context: ExecutionContext): Tensor[] => { + context: ExecutionContext, ops = tfOps): Tensor[] => { switch (node.op) { case 'Const': { return tensorMap[node.name]; @@ -52,22 +52,22 @@ export const executeOp: InternalOpExecutor = (getParamValue('x', node, tensorMap, context) as Tensor); return [cloneTensor(snapshot)]; case 'Shape': - return [tfOps.tensor1d( + return [ops.tensor1d( (getParamValue('x', node, tensorMap, context) as Tensor).shape, 'int32')]; case 'ShapeN': return (getParamValue('x', node, tensorMap, context) as Tensor[]) - .map((t: Tensor) => tfOps.tensor1d(t.shape)); + .map((t: Tensor) => ops.tensor1d(t.shape)); case 'Size': - return [tfOps.scalar( + return [ops.scalar( (getParamValue('x', node, tensorMap, context) as Tensor).size, 'int32')]; case 'Rank': - return [tfOps.scalar( + return [ops.scalar( (getParamValue('x', node, tensorMap, context) as Tensor).rank, 'int32')]; case 'NoOp': - return [tfOps.scalar(1)]; + return [ops.scalar(1)]; case 'Print': const input = getParamValue('x', node, tensorMap, context) as Tensor; const data = diff --git a/tfjs-converter/src/operations/executors/image_executor.ts b/tfjs-converter/src/operations/executors/image_executor.ts index b598dd5f95c..87e29d40d9b 100644 --- a/tfjs-converter/src/operations/executors/image_executor.ts +++ b/tfjs-converter/src/operations/executors/image_executor.ts @@ -27,7 +27,7 @@ import {getParamValue} from './utils'; export const executeOp: InternalOpExecutor = (node: Node, tensorMap: NamedTensorsMap, - context: ExecutionContext): Tensor[] => { + context: ExecutionContext, ops = tfOps): Tensor[] => { switch (node.op) { case 'ResizeBilinear': { const images = @@ -40,7 +40,7 @@ export const executeOp: InternalOpExecutor = const halfPixelCenters = getParamValue('halfPixelCenters', node, tensorMap, context) as boolean; - return [tfOps.image.resizeBilinear( + return [ops.image.resizeBilinear( images as Tensor3D | Tensor4D, [size[0], size[1]], alignCorners, halfPixelCenters)]; } @@ -55,7 +55,7 @@ export const executeOp: InternalOpExecutor = const halfPixelCenters = getParamValue('halfPixelCenters', node, tensorMap, context) as boolean; - return [tfOps.image.resizeNearestNeighbor( + return [ops.image.resizeNearestNeighbor( images as Tensor3D | Tensor4D, [size[0], size[1]], alignCorners, halfPixelCenters)]; } @@ -73,7 +73,7 @@ export const executeOp: InternalOpExecutor = const extrapolationValue = getParamValue('extrapolationValue', node, tensorMap, context) as number; - return [tfOps.image.cropAndResize( + return [ops.image.cropAndResize( image as Tensor4D, boxes as Tensor2D, boxInd as Tensor1D, cropSize as [number, number], method as 'bilinear' | 'nearest', extrapolationValue)]; @@ -93,7 +93,7 @@ export const executeOp: InternalOpExecutor = string; const fillMode = getParamValue('fillMode', node, tensorMap, context) as string; - return [tfOps.image.transform( + return [ops.image.transform( images as Tensor4D, transforms as Tensor2D, interpolation.toLowerCase() as 'bilinear' | 
'nearest', diff --git a/tfjs-converter/src/operations/executors/image_executor_test.ts b/tfjs-converter/src/operations/executors/image_executor_test.ts index 74d83f088e3..ef73eb97957 100644 --- a/tfjs-converter/src/operations/executors/image_executor_test.ts +++ b/tfjs-converter/src/operations/executors/image_executor_test.ts @@ -23,13 +23,18 @@ import {Node} from '../types'; import {executeOp} from './image_executor'; import {createBoolAttr, createNumberAttr, createNumberAttrFromIndex, createNumericArrayAttrFromIndex, createStrAttr, createTensorAttr, validateParam} from './test_helper'; +import {spyOnAllFunctions, RecursiveSpy} from './spy_ops'; describe('image', () => { let node: Node; const input1 = [tfOps.tensor1d([1])]; const context = new ExecutionContext({}, {}, {}); + let spyOps: RecursiveSpy; + let spyOpsAsTfOps: typeof tfOps; beforeEach(() => { + spyOps = spyOnAllFunctions(tfOps); + spyOpsAsTfOps = spyOps as unknown as typeof tfOps; node = { name: 'input1', op: '', @@ -52,9 +57,10 @@ describe('image', () => { node.attrParams['halfPixelCenters'] = createBoolAttr(true); node.inputNames = ['input1', 'input2']; const input2 = [tfOps.tensor1d([1, 2])]; - spyOn(tfOps.image, 'resizeBilinear'); - executeOp(node, {input1, input2}, context); - expect(tfOps.image.resizeBilinear) + spyOps.image.resizeBilinear.and.returnValue({}); + + executeOp(node, {input1, input2}, context, spyOpsAsTfOps); + expect(spyOps.image.resizeBilinear) .toHaveBeenCalledWith(input1[0], [1, 2], true, true); }); it('should match json def', () => { @@ -76,9 +82,10 @@ describe('image', () => { node.attrParams['halfPixelCenters'] = createBoolAttr(true); node.inputNames = ['input1', 'input2']; const input2 = [tfOps.tensor1d([1, 2])]; - spyOn(tfOps.image, 'resizeNearestNeighbor'); - executeOp(node, {input1, input2}, context); - expect(tfOps.image.resizeNearestNeighbor) + spyOps.image.resizeNearestNeighbor.and.returnValue({}); + + executeOp(node, {input1, input2}, context, spyOpsAsTfOps); + expect(spyOps.image.resizeNearestNeighbor) .toHaveBeenCalledWith(input1[0], [1, 2], true, true); }); it('should match json def', () => { @@ -102,13 +109,14 @@ describe('image', () => { node.attrParams['extrapolationValue'] = createNumberAttr(0.5); node.inputNames = ['input1', 'input2', 'input3', 'input4']; - spyOn(tfOps.image, 'cropAndResize'); + spyOps.image.cropAndResize.and.returnValue({}); const input2 = [tfOps.tensor1d([2])]; const input3 = [tfOps.tensor1d([3])]; const input4 = [tfOps.tensor1d([4, 5])]; - executeOp(node, {input1, input2, input3, input4}, context); - expect(tfOps.image.cropAndResize) + executeOp(node, {input1, input2, input3, input4}, context, + spyOpsAsTfOps); + expect(spyOps.image.cropAndResize) .toHaveBeenCalledWith( input1[0], input2[0], input3[0], [4, 5], 'bilinear', 0.5); }); @@ -137,13 +145,14 @@ describe('image', () => { node.attrParams['fillMode'] = createStrAttr('constant'); node.inputNames = ['input1', 'input2', 'input3', 'input4']; - spyOn(tfOps.image, 'transform'); - const input2 = [tfOps.tensor1d([2])]; + const input1 = [tfOps.tensor4d([1], [1, 1, 1, 1])]; + const input2 = [tfOps.tensor2d([1, 2, 3, 4, 5, 6, 7, 8], [1, 8])]; const input3 = [tfOps.tensor1d([4, 5])]; const input4 = [tfOps.scalar(3)]; - executeOp(node, {input1, input2, input3, input4}, context); - expect(tfOps.image.transform) + executeOp(node, {input1, input2, input3, input4}, context, + spyOpsAsTfOps); + expect(spyOps.image.transform) .toHaveBeenCalledWith( input1[0], input2[0], 'bilinear', 'constant', 3, [4, 5]); }); diff --git 
a/tfjs-converter/src/operations/executors/logical_executor.ts b/tfjs-converter/src/operations/executors/logical_executor.ts index 2e999f34f2d..17d43b02941 100644 --- a/tfjs-converter/src/operations/executors/logical_executor.ts +++ b/tfjs-converter/src/operations/executors/logical_executor.ts @@ -27,55 +27,55 @@ import {getParamValue} from './utils'; export const executeOp: InternalOpExecutor = (node: Node, tensorMap: NamedTensorsMap, - context: ExecutionContext): Tensor[] => { + context: ExecutionContext, ops = tfOps): Tensor[] => { switch (node.op) { case 'Equal': { - return [tfOps.equal( + return [ops.equal( getParamValue('a', node, tensorMap, context) as Tensor, getParamValue('b', node, tensorMap, context) as Tensor)]; } case 'NotEqual': { - return [tfOps.notEqual( + return [ops.notEqual( getParamValue('a', node, tensorMap, context) as Tensor, getParamValue('b', node, tensorMap, context) as Tensor)]; } case 'Greater': { - return [tfOps.greater( + return [ops.greater( getParamValue('a', node, tensorMap, context) as Tensor, getParamValue('b', node, tensorMap, context) as Tensor)]; } case 'GreaterEqual': { - return [tfOps.greaterEqual( + return [ops.greaterEqual( getParamValue('a', node, tensorMap, context) as Tensor, getParamValue('b', node, tensorMap, context) as Tensor)]; } case 'Less': { - return [tfOps.less( + return [ops.less( getParamValue('a', node, tensorMap, context) as Tensor, getParamValue('b', node, tensorMap, context) as Tensor)]; } case 'LessEqual': { - return [tfOps.lessEqual( + return [ops.lessEqual( getParamValue('a', node, tensorMap, context) as Tensor, getParamValue('b', node, tensorMap, context) as Tensor)]; } case 'LogicalAnd': { - return [tfOps.logicalAnd( + return [ops.logicalAnd( getParamValue('a', node, tensorMap, context) as Tensor, getParamValue('b', node, tensorMap, context) as Tensor)]; } case 'LogicalNot': { - return [tfOps.logicalNot( + return [ops.logicalNot( getParamValue('a', node, tensorMap, context) as Tensor)]; } case 'LogicalOr': { - return [tfOps.logicalOr( + return [ops.logicalOr( getParamValue('a', node, tensorMap, context) as Tensor, getParamValue('b', node, tensorMap, context) as Tensor)]; } case 'Select': case 'SelectV2': { - return [tfOps.where( + return [ops.where( getParamValue('condition', node, tensorMap, context) as Tensor, getParamValue('a', node, tensorMap, context) as Tensor, getParamValue('b', node, tensorMap, context) as Tensor)]; diff --git a/tfjs-converter/src/operations/executors/logical_executor_test.ts b/tfjs-converter/src/operations/executors/logical_executor_test.ts index 1840902e553..80da758eb3b 100644 --- a/tfjs-converter/src/operations/executors/logical_executor_test.ts +++ b/tfjs-converter/src/operations/executors/logical_executor_test.ts @@ -21,7 +21,8 @@ import {ExecutionContext} from '../../executor/execution_context'; import {Node} from '../types'; import {executeOp} from './logical_executor'; -import {createTensorAttr} from './test_helper'; +import {RecursiveSpy, spyOnAllFunctions} from './spy_ops'; +import {createTensorAttr, uncapitalize} from './test_helper'; describe('logical', () => { let node: Node; @@ -43,52 +44,63 @@ describe('logical', () => { }); describe('executeOp', () => { - ['Equal', 'NotEqual', 'Greater', 'GreaterEqual', 'Less', 'LessEqual', - 'LogicalAnd', 'LogicalOr'] + let spyOps: RecursiveSpy; + let spyOpsAsTfOps: typeof tfOps; + + beforeEach(() => { + spyOps = spyOnAllFunctions(tfOps); + spyOpsAsTfOps = spyOps as unknown as typeof tfOps; + }); + + ([ + 'Equal', 'NotEqual', 'Greater', 'GreaterEqual', 
'Less', 'LessEqual', + 'LogicalAnd', 'LogicalOr' + ] as const ) .forEach(op => { it('should call tfOps.' + op, () => { - const spy = spyOn( - tfOps, op.charAt(0).toLowerCase() + op.slice(1) as 'equal'); node.op = op; - executeOp(node, {input1, input2}, context); + spyOps[uncapitalize(op)].and.returnValue({}); + executeOp(node, {input1, input2}, context, spyOpsAsTfOps); - expect(spy).toHaveBeenCalledWith(input1[0], input2[0]); + expect(spyOps[uncapitalize(op)]) + .toHaveBeenCalledWith(input1[0], input2[0]); }); }); describe('LogicalNot', () => { it('should call tfOps.logicalNot', () => { - spyOn(tfOps, 'logicalNot'); node.op = 'LogicalNot'; - executeOp(node, {input1}, context); + spyOps.logicalNot.and.returnValue({}); + + executeOp(node, {input1}, context, spyOpsAsTfOps); - expect(tfOps.logicalNot).toHaveBeenCalledWith(input1[0]); + expect(spyOps.logicalNot).toHaveBeenCalledWith(input1[0]); }); }); describe('Select', () => { it('should call tfOps.where', () => { - spyOn(tfOps, 'where'); node.op = 'Select'; node.inputNames = ['input1', 'input2', 'input3']; node.inputParams.condition = createTensorAttr(2); const input3 = [tfOps.scalar(1)]; - executeOp(node, {input1, input2, input3}, context); + spyOps.where.and.returnValue({}); + executeOp(node, {input1, input2, input3}, context, spyOpsAsTfOps); - expect(tfOps.where) + expect(spyOps.where) .toHaveBeenCalledWith(input3[0], input1[0], input2[0]); }); }); describe('SelectV2', () => { it('should call tfOps.where', () => { - spyOn(tfOps, 'where'); node.op = 'SelectV2'; node.inputNames = ['input1', 'input2', 'input3']; node.inputParams.condition = createTensorAttr(2); const input3 = [tfOps.scalar(1)]; - executeOp(node, {input1, input2, input3}, context); + spyOps.where.and.returnValue({}); + executeOp(node, {input1, input2, input3}, context, spyOpsAsTfOps); - expect(tfOps.where) + expect(spyOps.where) .toHaveBeenCalledWith(input3[0], input1[0], input2[0]); }); }); diff --git a/tfjs-converter/src/operations/executors/matrices_executor.ts b/tfjs-converter/src/operations/executors/matrices_executor.ts index 1a0cdb66628..28a275e071d 100644 --- a/tfjs-converter/src/operations/executors/matrices_executor.ts +++ b/tfjs-converter/src/operations/executors/matrices_executor.ts @@ -27,12 +27,12 @@ import {getParamValue} from './utils'; export const executeOp: InternalOpExecutor = (node: Node, tensorMap: NamedTensorsMap, - context: ExecutionContext): Tensor[] => { + context: ExecutionContext, ops = tfOps): Tensor[] => { switch (node.op) { case 'BatchMatMul': case 'BatchMatMulV2': case 'MatMul': - return [tfOps.matMul( + return [ops.matMul( getParamValue('a', node, tensorMap, context) as Tensor2D, getParamValue('b', node, tensorMap, context) as Tensor2D, getParamValue('transposeA', node, tensorMap, context) as boolean, @@ -40,13 +40,13 @@ export const executeOp: InternalOpExecutor = boolean)]; case 'Einsum': - return [tfOps.einsum( + return [ops.einsum( getParamValue('equation', node, tensorMap, context) as string, ...getParamValue('tensors', node, tensorMap, context) as Tensor[])]; case 'Transpose': - return [tfOps.transpose( + return [ops.transpose( getParamValue('x', node, tensorMap, context) as Tensor, getParamValue('perm', node, tensorMap, context) as number[])]; @@ -76,7 +76,7 @@ export const executeOp: InternalOpExecutor = } const [biasArg, preluArg] = getParamValue('args', node, tensorMap, context) as Tensor[]; - return [tfOps.fused.matMul({ + return [ops.fused.matMul({ a: getParamValue('a', node, tensorMap, context) as Tensor2D, b: getParamValue('b', 
node, tensorMap, context) as Tensor2D, transposeA: getParamValue('transposeA', node, tensorMap, context) as diff --git a/tfjs-converter/src/operations/executors/matrices_executor_test.ts b/tfjs-converter/src/operations/executors/matrices_executor_test.ts index 1ab428c0046..b9a021afa9a 100644 --- a/tfjs-converter/src/operations/executors/matrices_executor_test.ts +++ b/tfjs-converter/src/operations/executors/matrices_executor_test.ts @@ -23,6 +23,7 @@ import {ExecutionContext} from '../../executor/execution_context'; import {Node} from '../types'; import {executeOp} from './matrices_executor'; +import {RecursiveSpy, spyOnAllFunctions} from './spy_ops'; import {createBoolAttr, createNumberAttr, createNumericArrayAttr, createStrArrayAttr, createStrAttr, createTensorAttr, createTensorsAttr} from './test_helper'; describe('matrices', () => { @@ -30,8 +31,12 @@ describe('matrices', () => { const input1 = [tfOps.scalar(1)]; const input2 = [tfOps.scalar(2)]; const context = new ExecutionContext({}, {}, {}); + let spyOps: RecursiveSpy; + let spyOpsAsTfOps: typeof tfOps; beforeEach(() => { + spyOps = spyOnAllFunctions(tfOps); + spyOpsAsTfOps = spyOps as unknown as typeof tfOps; node = { name: 'test', op: '', @@ -47,19 +52,18 @@ describe('matrices', () => { describe('executeOp', () => { describe('MatMul', () => { it('should call tfOps.matMul', () => { - spyOn(tfOps, 'matMul'); node.op = 'MatMul'; node.attrParams.transposeA = createBoolAttr(true); node.attrParams.transposeB = createBoolAttr(false); - executeOp(node, {input1, input2}, context); + spyOps.matMul.and.returnValue({}); + executeOp(node, {input1, input2}, context, spyOpsAsTfOps); - expect(tfOps.matMul) + expect(spyOps.matMul) .toHaveBeenCalledWith(input1[0], input2[0], true, false); }); }); describe('_FusedMatMul', () => { it('should call tfOps.fused.matMul', () => { - spyOn(tfOps.fused, 'matMul'); node.op = '_FusedMatMul'; node.inputParams['args'] = createTensorsAttr(2, 0); node.attrParams['fusedOps'] = createStrArrayAttr(['biasadd', 'relu']); @@ -68,9 +72,10 @@ describe('matrices', () => { node.attrParams.transposeB = createBoolAttr(false); const input3 = [tfOps.scalar(3.0)]; node.inputNames = ['input1', 'input2', 'input3']; - executeOp(node, {input1, input2, input3}, context); + spyOps.fused.matMul.and.returnValue({}); + executeOp(node, {input1, input2, input3}, context, spyOpsAsTfOps); - expect(tfOps.fused.matMul).toHaveBeenCalledWith({ + expect(spyOps.fused.matMul).toHaveBeenCalledWith({ a: input1[0], b: input2[0], transposeA: true, @@ -82,7 +87,6 @@ describe('matrices', () => { }); }); it('should call tfOps.fused.matMul - prelu activation', () => { - spyOn(tfOps.fused, 'matMul'); node.op = '_FusedMatMul'; node.inputParams['args'] = createTensorsAttr(2, 0); node.attrParams['fusedOps'] = createStrArrayAttr(['biasadd', 'prelu']); @@ -92,9 +96,11 @@ describe('matrices', () => { const input3 = [tfOps.scalar(3.0)]; const input4 = [tfOps.scalar(4.0)]; node.inputNames = ['input1', 'input2', 'input3', 'input4']; - executeOp(node, {input1, input2, input3, input4}, context); + spyOps.fused.matMul.and.returnValue({}); + executeOp( + node, {input1, input2, input3, input4}, context, spyOpsAsTfOps); - expect(tfOps.fused.matMul).toHaveBeenCalledWith({ + expect(spyOps.fused.matMul).toHaveBeenCalledWith({ a: input1[0], b: input2[0], transposeA: true, @@ -106,7 +112,6 @@ describe('matrices', () => { }); }); it('should call tfOps.fused.matMul - leakyrelu activation', () => { - spyOn(tfOps.fused, 'matMul'); node.op = '_FusedMatMul'; 
node.inputParams['args'] = createTensorsAttr(2, 0); node.attrParams['fusedOps'] = @@ -117,9 +122,10 @@ describe('matrices', () => { node.attrParams.leakyreluAlpha = createNumberAttr(0.3); const input3 = [tfOps.scalar(3.0)]; node.inputNames = ['input1', 'input2', 'input3']; - executeOp(node, {input1, input2, input3}, context); + spyOps.fused.matMul.and.returnValue({}); + executeOp(node, {input1, input2, input3}, context, spyOpsAsTfOps); - expect(tfOps.fused.matMul).toHaveBeenCalledWith({ + expect(spyOps.fused.matMul).toHaveBeenCalledWith({ a: input1[0], b: input2[0], transposeA: true, @@ -133,52 +139,51 @@ describe('matrices', () => { }); describe('BatchMatMul', () => { it('should call tfOps.matMul', () => { - spyOn(tfOps, 'matMul'); node.op = 'BatchMatMul'; node.attrParams.transposeA = createBoolAttr(true); node.attrParams.transposeB = createBoolAttr(false); - executeOp(node, {input1, input2}, context); + spyOps.matMul.and.returnValue({}); + executeOp(node, {input1, input2}, context, spyOpsAsTfOps); - expect(tfOps.matMul) + expect(spyOps.matMul) .toHaveBeenCalledWith(input1[0], input2[0], true, false); }); }); describe('BatchMatMulV2', () => { it('should call tfOps.matMul', () => { - spyOn(tfOps, 'matMul'); node.op = 'BatchMatMulV2'; node.attrParams.transposeA = createBoolAttr(true); node.attrParams.transposeB = createBoolAttr(false); - executeOp(node, {input1, input2}, context); + spyOps.matMul.and.returnValue({}); + executeOp(node, {input1, input2}, context, spyOpsAsTfOps); - expect(tfOps.matMul) + expect(spyOps.matMul) .toHaveBeenCalledWith(input1[0], input2[0], true, false); }); }); describe('Einsum', () => { it('should call tfOps.einsum', () => { - const spy = spyOn(tfOps, 'einsum').and.callThrough(); node.op = 'Einsum'; node.inputParams = {tensors: createTensorsAttr(0, 0)}; node.inputNames = ['input1', 'input2']; node.attrParams.equation = createStrAttr(',->'); - executeOp(node, {input1, input2}, context); + executeOp(node, {input1, input2}, context, spyOpsAsTfOps); const res = executeOp(node, {input1, input2}, context) as Tensor[]; - expect(spy).toHaveBeenCalledWith(',->', input1[0], input2[0]); + expect(spyOps.einsum).toHaveBeenCalledWith(',->', input1[0], input2[0]); expect(res[0].dtype).toBe('float32'); expect(res[0].shape).toEqual([]); }); }); describe('Transpose', () => { it('should call tfOps.transpose', () => { - spyOn(tfOps, 'transpose'); node.op = 'Transpose'; node.inputNames = ['input1', 'input2', 'input3']; node.inputParams.x = createTensorAttr(0); node.attrParams.perm = createNumericArrayAttr([1, 2]); - executeOp(node, {input1}, context); + spyOps.transpose.and.returnValue({}); + executeOp(node, {input1}, context, spyOpsAsTfOps); - expect(tfOps.transpose).toHaveBeenCalledWith(input1[0], [1, 2]); + expect(spyOps.transpose).toHaveBeenCalledWith(input1[0], [1, 2]); }); }); }); diff --git a/tfjs-converter/src/operations/executors/normalization_executor.ts b/tfjs-converter/src/operations/executors/normalization_executor.ts index adc3a381ea5..541d4fa89bf 100644 --- a/tfjs-converter/src/operations/executors/normalization_executor.ts +++ b/tfjs-converter/src/operations/executors/normalization_executor.ts @@ -27,11 +27,11 @@ import {getParamValue} from './utils'; export const executeOp: InternalOpExecutor = (node: Node, tensorMap: NamedTensorsMap, - context: ExecutionContext): Tensor[] => { + context: ExecutionContext, ops = tfOps): Tensor[] => { switch (node.op) { case 'FusedBatchNorm': case 'FusedBatchNormV2': { - return [tfOps.batchNorm( + return [ops.batchNorm( 
getParamValue('x', node, tensorMap, context) as Tensor, getParamValue('mean', node, tensorMap, context) as Tensor, getParamValue('variance', node, tensorMap, context) as Tensor, @@ -40,7 +40,7 @@ export const executeOp: InternalOpExecutor = getParamValue('epsilon', node, tensorMap, context) as number)]; } case 'FusedBatchNormV3': { - return [tfOps.batchNorm( + return [ops.batchNorm( getParamValue('x', node, tensorMap, context) as Tensor, getParamValue('mean', node, tensorMap, context) as Tensor, getParamValue('variance', node, tensorMap, context) as Tensor, @@ -49,7 +49,7 @@ export const executeOp: InternalOpExecutor = getParamValue('epsilon', node, tensorMap, context) as number)]; } case 'LRN': { - return [tfOps.localResponseNormalization( + return [ops.localResponseNormalization( getParamValue('x', node, tensorMap, context) as Tensor3D | Tensor4D, getParamValue('radius', node, tensorMap, context) as number, @@ -58,15 +58,15 @@ export const executeOp: InternalOpExecutor = getParamValue('beta', node, tensorMap, context) as number)]; } case 'Softmax': { - return [tfOps.softmax( + return [ops.softmax( getParamValue('x', node, tensorMap, context) as Tensor)]; } case 'LogSoftmax': { - return [tfOps.logSoftmax( + return [ops.logSoftmax( getParamValue('x', node, tensorMap, context) as Tensor)]; } case 'SparseToDense': { - return [tfOps.sparseToDense( + return [ops.sparseToDense( getParamValue('sparseIndices', node, tensorMap, context) as Tensor, getParamValue('outputShape', node, tensorMap, context) as Tensor, diff --git a/tfjs-converter/src/operations/executors/normalization_executor_test.ts b/tfjs-converter/src/operations/executors/normalization_executor_test.ts index 4f38c820285..b5c12a3f56b 100644 --- a/tfjs-converter/src/operations/executors/normalization_executor_test.ts +++ b/tfjs-converter/src/operations/executors/normalization_executor_test.ts @@ -23,13 +23,18 @@ import {Node} from '../types'; import {executeOp} from './normalization_executor'; import {createNumberAttr, createNumericArrayAttrFromIndex, createTensorAttr, validateParam} from './test_helper'; +import {spyOnAllFunctions, RecursiveSpy} from './spy_ops'; describe('normalization', () => { let node: Node; const input1 = [tfOps.scalar(1)]; const context = new ExecutionContext({}, {}, {}); + let spyOps: RecursiveSpy; + let spyOpsAsTfOps: typeof tfOps; beforeEach(() => { + spyOps = spyOnAllFunctions(tfOps); + spyOpsAsTfOps = spyOps as unknown as typeof tfOps; node = { name: 'test', op: '', @@ -45,7 +50,6 @@ describe('normalization', () => { describe('executeOp', () => { describe('FusedBatchNorm', () => { it('should call tfOps.batchNorm', () => { - spyOn(tfOps, 'batchNorm'); node.op = 'FusedBatchNorm'; node.inputParams.scale = createTensorAttr(1); node.inputParams.offset = createTensorAttr(2); @@ -57,16 +61,16 @@ describe('normalization', () => { const input3 = [tfOps.scalar(2)]; const input4 = [tfOps.scalar(3)]; const input5 = [tfOps.scalar(4)]; - executeOp(node, {input1, input2, input3, input4, input5}, context); + executeOp(node, {input1, input2, input3, input4, input5}, context, + spyOpsAsTfOps); - expect(tfOps.batchNorm) + expect(spyOps.batchNorm) .toHaveBeenCalledWith( input1[0], input4[0], input5[0], input3[0], input2[0], 5); }); }); describe('FusedBatchNormV2', () => { it('should call tfOps.batchNorm', () => { - spyOn(tfOps, 'batchNorm'); node.op = 'FusedBatchNormV2'; node.inputParams.scale = createTensorAttr(1); node.inputParams.offset = createTensorAttr(2); @@ -78,16 +82,16 @@ describe('normalization', () => { const 
input3 = [tfOps.scalar(2)]; const input4 = [tfOps.scalar(3)]; const input5 = [tfOps.scalar(4)]; - executeOp(node, {input1, input2, input3, input4, input5}, context); + executeOp(node, {input1, input2, input3, input4, input5}, context, + spyOpsAsTfOps); - expect(tfOps.batchNorm) + expect(spyOps.batchNorm) .toHaveBeenCalledWith( input1[0], input4[0], input5[0], input3[0], input2[0], 5); }); }); describe('FusedBatchNormV3', () => { it('should call tfOps.batchNorm', () => { - spyOn(tfOps, 'batchNorm'); node.op = 'FusedBatchNormV3'; node.inputParams.scale = createTensorAttr(1); node.inputParams.offset = createTensorAttr(2); @@ -99,25 +103,26 @@ describe('normalization', () => { const input3 = [tfOps.scalar(2)]; const input4 = [tfOps.scalar(3)]; const input5 = [tfOps.scalar(4)]; - executeOp(node, {input1, input2, input3, input4, input5}, context); + executeOp(node, {input1, input2, input3, input4, input5}, context, + spyOpsAsTfOps); - expect(tfOps.batchNorm) + expect(spyOps.batchNorm) .toHaveBeenCalledWith( input1[0], input4[0], input5[0], input3[0], input2[0], 5); }); }); describe('LRN', () => { it('should call tfOps.localResponseNormalization', () => { - spyOn(tfOps, 'localResponseNormalization'); node.op = 'LRN'; node.attrParams.radius = createNumberAttr(1); node.attrParams.bias = createNumberAttr(2); node.attrParams.alpha = createNumberAttr(3); node.attrParams.beta = createNumberAttr(4); + spyOps.localResponseNormalization.and.returnValue({}); - executeOp(node, {input1}, context); + executeOp(node, {input1}, context, spyOpsAsTfOps); - expect(tfOps.localResponseNormalization) + expect(spyOps.localResponseNormalization) .toHaveBeenCalledWith(input1[0], 1, 2, 3, 4); }); it('should match json def', () => { @@ -133,12 +138,12 @@ describe('normalization', () => { describe('Softmax', () => { it('should call tfOps.softmax', () => { - spyOn(tfOps, 'softmax'); node.op = 'Softmax'; + spyOps.softmax.and.returnValue({}); - executeOp(node, {input1}, context); + executeOp(node, {input1}, context, spyOpsAsTfOps); - expect(tfOps.softmax).toHaveBeenCalledWith(input1[0]); + expect(spyOps.softmax).toHaveBeenCalledWith(input1[0]); }); it('should match json def', () => { node.op = 'Softmax'; @@ -149,12 +154,12 @@ describe('normalization', () => { describe('LogSoftmax', () => { it('should call tfOps.logSoftmax', () => { - spyOn(tfOps, 'logSoftmax'); node.op = 'LogSoftmax'; + spyOps.logSoftmax.and.returnValue({}); - executeOp(node, {input1}, context); + executeOp(node, {input1}, context, spyOpsAsTfOps); - expect(tfOps.logSoftmax).toHaveBeenCalledWith(input1[0]); + expect(spyOps.logSoftmax).toHaveBeenCalledWith(input1[0]); }); it('should match json def', () => { node.op = 'LogSoftmax'; @@ -164,7 +169,6 @@ describe('normalization', () => { }); describe('SparseToDense', () => { it('should call tfOps.sparseToDense', () => { - spyOn(tfOps, 'sparseToDense'); node.op = 'SparseToDense'; node.inputParams.sparseIndices = createTensorAttr(0); node.inputParams.outputShape = createNumericArrayAttrFromIndex(1); @@ -174,9 +178,11 @@ describe('normalization', () => { const input2 = [tfOps.tensor1d([1], 'int32')]; const input3 = [tfOps.scalar(2)]; const input4 = [tfOps.scalar(3)]; - executeOp(node, {input1, input2, input3, input4}, context); + spyOps.sparseToDense.and.returnValue({}); + executeOp(node, {input1, input2, input3, input4}, context, + spyOpsAsTfOps); - expect(tfOps.sparseToDense) + expect(spyOps.sparseToDense) .toHaveBeenCalledWith(input1[0], [1], input3[0], input4[0]); }); it('should match json def', () => { diff --git 
a/tfjs-converter/src/operations/executors/reduction_executor.ts b/tfjs-converter/src/operations/executors/reduction_executor.ts index 912537dc862..f2d9e2c5c82 100644 --- a/tfjs-converter/src/operations/executors/reduction_executor.ts +++ b/tfjs-converter/src/operations/executors/reduction_executor.ts @@ -27,14 +27,14 @@ import {getParamValue} from './utils'; export const executeOp: InternalOpExecutor = (node: Node, tensorMap: NamedTensorsMap, - context: ExecutionContext): Tensor[] => { + context: ExecutionContext, ops = tfOps): Tensor[] => { switch (node.op) { case 'Max': { const axis = getParamValue('axis', node, tensorMap, context) as number[]; const keepDims = getParamValue('keepDims', node, tensorMap, context) as boolean; - return [tfOps.max( + return [ops.max( getParamValue('x', node, tensorMap, context) as Tensor, axis, keepDims)]; } @@ -43,7 +43,7 @@ export const executeOp: InternalOpExecutor = getParamValue('axis', node, tensorMap, context) as number[]; const keepDims = getParamValue('keepDims', node, tensorMap, context) as boolean; - return [tfOps.mean( + return [ops.mean( getParamValue('x', node, tensorMap, context) as Tensor, axis, keepDims)]; } @@ -52,7 +52,7 @@ export const executeOp: InternalOpExecutor = getParamValue('axis', node, tensorMap, context) as number[]; const keepDims = getParamValue('keepDims', node, tensorMap, context) as boolean; - return [tfOps.min( + return [ops.min( getParamValue('x', node, tensorMap, context) as Tensor, axis, keepDims)]; } @@ -61,7 +61,7 @@ export const executeOp: InternalOpExecutor = getParamValue('axis', node, tensorMap, context) as number[]; const keepDims = getParamValue('keepDims', node, tensorMap, context) as boolean; - return [tfOps.sum( + return [ops.sum( getParamValue('x', node, tensorMap, context) as Tensor, axis, keepDims)]; } @@ -70,7 +70,7 @@ export const executeOp: InternalOpExecutor = getParamValue('axis', node, tensorMap, context) as number[]; const keepDims = getParamValue('keepDims', node, tensorMap, context) as boolean; - return [tfOps.all( + return [ops.all( getParamValue('x', node, tensorMap, context) as Tensor, axis, keepDims)]; } @@ -79,20 +79,20 @@ export const executeOp: InternalOpExecutor = getParamValue('axis', node, tensorMap, context) as number[]; const keepDims = getParamValue('keepDims', node, tensorMap, context) as boolean; - return [tfOps.any( + return [ops.any( getParamValue('x', node, tensorMap, context) as Tensor, axis, keepDims)]; } case 'ArgMax': { const axis = getParamValue('axis', node, tensorMap, context) as number; - return [tfOps.argMax( + return [ops.argMax( getParamValue('x', node, tensorMap, context) as Tensor, axis)]; } case 'ArgMin': { const axis = getParamValue('axis', node, tensorMap, context) as number; - return [tfOps.argMin( + return [ops.argMin( getParamValue('x', node, tensorMap, context) as Tensor, axis)]; } case 'Prod': { @@ -100,7 +100,7 @@ export const executeOp: InternalOpExecutor = getParamValue('axis', node, tensorMap, context) as number[]; const keepDims = getParamValue('keepDims', node, tensorMap, context) as boolean; - return [tfOps.prod( + return [ops.prod( getParamValue('x', node, tensorMap, context) as Tensor, axis, keepDims)]; } @@ -111,7 +111,7 @@ export const executeOp: InternalOpExecutor = getParamValue('exclusive', node, tensorMap, context) as boolean; const reverse = getParamValue('reverse', node, tensorMap, context) as boolean; - return [tfOps.cumprod( + return [ops.cumprod( getParamValue('x', node, tensorMap, context) as Tensor, axis, exclusive, reverse)]; } @@ 
-122,7 +122,7 @@ export const executeOp: InternalOpExecutor = getParamValue('exclusive', node, tensorMap, context) as boolean; const reverse = getParamValue('reverse', node, tensorMap, context) as boolean; - return [tfOps.cumsum( + return [ops.cumsum( getParamValue('x', node, tensorMap, context) as Tensor, axis, exclusive, reverse)]; } @@ -133,7 +133,7 @@ export const executeOp: InternalOpExecutor = const size = getParamValue('size', node, tensorMap, context) as number; - return [tfOps.bincount(x, weights, size)]; + return [ops.bincount(x, weights, size)]; case 'DenseBincount': { const x = getParamValue('x', node, tensorMap, context) as Tensor1D | Tensor2D; @@ -147,7 +147,7 @@ export const executeOp: InternalOpExecutor = getParamValue('binaryOutput', node, tensorMap, context) as boolean; - return [tfOps.denseBincount(x, weights, size, binaryOutput)]; + return [ops.denseBincount(x, weights, size, binaryOutput)]; } default: throw TypeError(`Node type ${node.op} is not implemented`); diff --git a/tfjs-converter/src/operations/executors/reduction_executor_test.ts b/tfjs-converter/src/operations/executors/reduction_executor_test.ts index a2c31c11825..9950e164016 100644 --- a/tfjs-converter/src/operations/executors/reduction_executor_test.ts +++ b/tfjs-converter/src/operations/executors/reduction_executor_test.ts @@ -22,14 +22,19 @@ import * as reduction from '../op_list/reduction'; import {Node} from '../types'; import {executeOp} from './reduction_executor'; -import {createBoolAttr, createNumberAttr, createNumberAttrFromIndex, createTensorAttr, validateParam} from './test_helper'; +import {RecursiveSpy, spyOnAllFunctions} from './spy_ops'; +import {createBoolAttr, createNumberAttr, createNumberAttrFromIndex, createTensorAttr, uncapitalize, validateParam} from './test_helper'; describe('reduction', () => { let node: Node; const input1 = [tfOps.scalar(1)]; const context = new ExecutionContext({}, {}, {}); + let spyOps: RecursiveSpy; + let spyOpsAsTfOps: typeof tfOps; beforeEach(() => { + spyOps = spyOnAllFunctions(tfOps); + spyOpsAsTfOps = spyOps as unknown as typeof tfOps; node = { name: 'test', op: '', @@ -43,71 +48,69 @@ describe('reduction', () => { }); describe('executeOp', () => { - ['Max', 'Mean', 'Min', 'Sum', 'All', 'Any', 'Prod'].forEach(op => { - it('should call tfOps.' + op, () => { - const spy = - spyOn(tfOps, op.charAt(0).toLowerCase() + op.slice(1) as 'max'); - node.op = op; - node.attrParams.keepDims = createBoolAttr(true); - node.attrParams.axis = createNumberAttr(1); - executeOp(node, {input1}, context); + (['Max', 'Mean', 'Min', 'Sum', 'All', 'Any', 'Prod'] as const ) + .forEach(op => { + it('should call tfOps.' 
+ op, () => { + node.op = op; + node.attrParams.keepDims = createBoolAttr(true); + node.attrParams.axis = createNumberAttr(1); + spyOps[uncapitalize(op)].and.returnValue({}); + executeOp(node, {input1}, context, spyOpsAsTfOps); - expect(spy).toHaveBeenCalledWith(input1[0], 1, true); - }); - }); + expect(spyOps[uncapitalize(op)]) + .toHaveBeenCalledWith(input1[0], 1, true); + }); + }); describe('ArgMax', () => { it('should call tfOps.argMax', () => { - spyOn(tfOps, 'argMax'); node.op = 'ArgMax'; node.attrParams.keepDims = createBoolAttr(true); node.attrParams.axis = createNumberAttr(1); - executeOp(node, {input1}, context); + spyOps.argMax.and.returnValue({}); + executeOp(node, {input1}, context, spyOpsAsTfOps); - expect(tfOps.argMax).toHaveBeenCalledWith(input1[0], 1); + expect(spyOps.argMax).toHaveBeenCalledWith(input1[0], 1); }); }); describe('ArgMin', () => { it('should call tfOps.argMin', () => { - spyOn(tfOps, 'argMin'); node.op = 'ArgMin'; node.attrParams.keepDims = createBoolAttr(true); node.attrParams.axis = createNumberAttr(1); - executeOp(node, {input1}, context); + spyOps.argMin.and.returnValue({}); + executeOp(node, {input1}, context, spyOpsAsTfOps); - expect(tfOps.argMin).toHaveBeenCalledWith(input1[0], 1); + expect(spyOps.argMin).toHaveBeenCalledWith(input1[0], 1); }); }); describe('Cumprod', () => { it('should call tfOps.cumprod', () => { - spyOn(tfOps, 'cumprod'); node.op = 'Cumprod'; node.attrParams.exclusive = createBoolAttr(true); node.attrParams.reverse = createBoolAttr(false); node.inputNames = ['input1', 'input2']; node.inputParams.axis = createNumberAttrFromIndex(1); const input2 = [tfOps.scalar(2)]; - executeOp(node, {input1, input2}, context); + executeOp(node, {input1, input2}, context, spyOpsAsTfOps); - expect(tfOps.cumprod).toHaveBeenCalledWith(input1[0], 2, true, false); + expect(spyOps.cumprod).toHaveBeenCalledWith(input1[0], 2, true, false); }); }); describe('Cumsum', () => { it('should call tfOps.cumsum', () => { - spyOn(tfOps, 'cumsum'); node.op = 'Cumsum'; node.attrParams.exclusive = createBoolAttr(true); node.attrParams.reverse = createBoolAttr(false); node.inputNames = ['input1', 'input2']; node.inputParams.axis = createNumberAttrFromIndex(1); const input2 = [tfOps.scalar(2)]; - executeOp(node, {input1, input2}, context); + executeOp(node, {input1, input2}, context, spyOpsAsTfOps); - expect(tfOps.cumsum).toHaveBeenCalledWith(input1[0], 2, true, false); + expect(spyOps.cumsum).toHaveBeenCalledWith(input1[0], 2, true, false); }); }); describe('Bincount', () => { it('should call tfOps.bincount', () => { - spyOn(tfOps, 'bincount'); node.op = 'Bincount'; node.inputNames = ['input4', 'input3', 'input2']; node.inputParams.size = createNumberAttrFromIndex(1); @@ -115,9 +118,9 @@ describe('reduction', () => { const input4 = [tfOps.tensor1d([1, 1], 'int32')]; const input3 = [tfOps.scalar(2)]; const input2 = [tfOps.tensor1d([])]; - executeOp(node, {input4, input3, input2}, context); + executeOp(node, {input4, input3, input2}, context, spyOpsAsTfOps); - expect(tfOps.bincount).toHaveBeenCalledWith(input4[0], input2[0], 2); + expect(spyOps.bincount).toHaveBeenCalledWith(input4[0], input2[0], 2); }); it('should match json def for bincount.', () => { node.op = 'Bincount'; @@ -129,7 +132,6 @@ describe('reduction', () => { }); describe('DenseBincount', () => { it('should call tfOps.denseBincount', () => { - spyOn(tfOps, 'denseBincount'); node.op = 'DenseBincount'; node.inputNames = ['input4', 'input3', 'input2']; node.inputParams.x = createTensorAttr(0); @@ -139,9 +141,9 
@@ describe('reduction', () => { const input4 = [tfOps.tensor1d([1, 1], 'int32')]; const input3 = [tfOps.scalar(2)]; const input2 = [tfOps.tensor1d([])]; - executeOp(node, {input4, input3, input2}, context); + executeOp(node, {input4, input3, input2}, context, spyOpsAsTfOps); - expect(tfOps.denseBincount) + expect(spyOps.denseBincount) .toHaveBeenCalledWith(input4[0], input2[0], 2, true); }); it('should match json def for denseBincount.', () => { diff --git a/tfjs-converter/src/operations/executors/slice_join_executor.ts b/tfjs-converter/src/operations/executors/slice_join_executor.ts index f2131d06432..e574d614147 100644 --- a/tfjs-converter/src/operations/executors/slice_join_executor.ts +++ b/tfjs-converter/src/operations/executors/slice_join_executor.ts @@ -27,7 +27,7 @@ import {getParamValue} from './utils'; export const executeOp: InternalOpExecutor = (node: Node, tensorMap: NamedTensorsMap, - context: ExecutionContext): Tensor[] => { + context: ExecutionContext, ops = tfOps): Tensor[] => { switch (node.op) { case 'ConcatV2': case 'Concat': { @@ -37,13 +37,13 @@ export const executeOp: InternalOpExecutor = let inputs = getParamValue('tensors', node, tensorMap, context) as Tensor[]; inputs = inputs.slice(0, n); - return [tfOps.concat(inputs, axis)]; + return [ops.concat(inputs, axis)]; } case 'Gather': { const input = getParamValue('x', node, tensorMap, context) as Tensor; const indices = getParamValue('indices', node, tensorMap, context) as Tensor1D; - return [tfOps.gather(input, tfOps.cast(indices, 'int32'), 0)]; + return [ops.gather(input, ops.cast(indices, 'int32'), 0)]; } case 'GatherV2': { const axis = @@ -53,8 +53,8 @@ export const executeOp: InternalOpExecutor = const input = getParamValue('x', node, tensorMap, context) as Tensor; const indices = getParamValue('indices', node, tensorMap, context) as Tensor1D; - return [tfOps.gather( - input, tfOps.cast(indices, 'int32'), axis, batchDims)]; + return [ops.gather( + input, ops.cast(indices, 'int32'), axis, batchDims)]; } case 'Reverse': { const dims = @@ -66,20 +66,20 @@ export const executeOp: InternalOpExecutor = } } const input = getParamValue('x', node, tensorMap, context) as Tensor; - return [tfOps.reverse(input, axis)]; + return [ops.reverse(input, axis)]; } case 'ReverseV2': { const axis = getParamValue('axis', node, tensorMap, context) as number[]; const input = getParamValue('x', node, tensorMap, context) as Tensor; - return [tfOps.reverse(input, axis)]; + return [ops.reverse(input, axis)]; } case 'Slice': { // tslint:disable-next-line:no-any const begin = getParamValue('begin', node, tensorMap, context) as any; // tslint:disable-next-line:no-any const size = getParamValue('size', node, tensorMap, context) as any; - return [tfOps.slice( + return [ops.slice( getParamValue('x', node, tensorMap, context) as Tensor, begin, size)]; } @@ -103,7 +103,7 @@ export const executeOp: InternalOpExecutor = number; const tensor = getParamValue('x', node, tensorMap, context) as Tensor; - return [tfOps.stridedSlice( + return [ops.stridedSlice( tensor, begin, end, strides, beginMask, endMask, ellipsisMask, newAxisMask, shrinkAxisMask)]; } @@ -116,17 +116,17 @@ export const executeOp: InternalOpExecutor = // Reshape the tensors to the first tensor's shape if they don't // match. 
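The hunks above and below all apply the same mechanical refactoring: each executor's executeOp gains a trailing `ops = tfOps` parameter, and every internal `tfOps.*` call is routed through `ops` instead. Production call sites are unchanged, since the parameter defaults to the real module, while the test files can inject a spied copy rather than monkey-patching the shared tfOps module with spyOn. A minimal sketch of the resulting executor shape, using the ReverseV2 case from this file; the import paths are assumed to mirror the neighbouring executor files:

```ts
import {Tensor} from '@tensorflow/tfjs-core';
// Assumed to match the tfOps import used by the other executors.
import * as tfOps from '@tensorflow/tfjs-core/dist/ops/ops_for_converter';

import {NamedTensorsMap} from '../../data/types';
import {ExecutionContext} from '../../executor/execution_context';
import {InternalOpExecutor, Node} from '../types';

import {getParamValue} from './utils';

export const executeOp: InternalOpExecutor =
    (node: Node, tensorMap: NamedTensorsMap, context: ExecutionContext,
     ops = tfOps): Tensor[] => {
      switch (node.op) {
        case 'ReverseV2': {
          const axis =
              getParamValue('axis', node, tensorMap, context) as number[];
          const input = getParamValue('x', node, tensorMap, context) as Tensor;
          // `ops` defaults to the real kernels; tests pass a spied copy of
          // tfOps through this parameter instead.
          return [ops.reverse(input, axis)];
        }
        default:
          throw TypeError(`Node type ${node.op} is not implemented`);
      }
    };
```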
const shape = tensors[0].shape; - const squeezedShape = tfOps.squeeze(tensors[0]).shape; + const squeezedShape = ops.squeeze(tensors[0]).shape; const mapped = tensors.map(tensor => { const sameShape = util.arraysEqual(tensor.shape, shape); if (!sameShape && !util.arraysEqual( - tfOps.squeeze(tensor).shape, squeezedShape)) { + ops.squeeze(tensor).shape, squeezedShape)) { throw new Error('the input tensors shape does not match'); } - return sameShape ? tensor : tfOps.reshape(tensor, shape); + return sameShape ? tensor : ops.reshape(tensor, shape); }); - return [tfOps.stack(mapped, axis)]; + return [ops.stack(mapped, axis)]; }); } case 'Unpack': { @@ -134,12 +134,12 @@ export const executeOp: InternalOpExecutor = getParamValue('axis', node, tensorMap, context) as number; const tensor = getParamValue('tensor', node, tensorMap, context) as Tensor; - return tfOps.unstack(tensor, axis); + return ops.unstack(tensor, axis); } case 'Tile': { const reps = getParamValue('reps', node, tensorMap, context) as number[]; - return [tfOps.tile( + return [ops.tile( getParamValue('x', node, tensorMap, context) as Tensor, reps)]; } case 'Split': @@ -152,7 +152,7 @@ export const executeOp: InternalOpExecutor = number[]; const tensor = getParamValue('x', node, tensorMap, context) as Tensor; - return tfOps.split(tensor, numOrSizeSplits, axis); + return ops.split(tensor, numOrSizeSplits, axis); } case 'ScatterNd': { const indices = @@ -161,13 +161,13 @@ export const executeOp: InternalOpExecutor = getParamValue('values', node, tensorMap, context) as Tensor; const shape = getParamValue('shape', node, tensorMap, context) as number[]; - return [tfOps.scatterND(indices, values, shape)]; + return [ops.scatterND(indices, values, shape)]; } case 'GatherNd': { const x = getParamValue('x', node, tensorMap, context) as Tensor; const indices = getParamValue('indices', node, tensorMap, context) as Tensor; - return [tfOps.gatherND(x, indices)]; + return [ops.gatherND(x, indices)]; } case 'SparseToDense': { const indices = @@ -180,11 +180,11 @@ export const executeOp: InternalOpExecutor = getParamValue('sparseValues', node, tensorMap, context) as Tensor; const defaultValue = getParamValue('defaultValue', node, tensorMap, context) as Scalar; - return [tfOps.sparseToDense( + return [ops.sparseToDense( indices, sparseValues, shape, sparseValues.dtype === defaultValue.dtype ? 
defaultValue : - tfOps.cast(defaultValue, sparseValues.dtype))]; + ops.cast(defaultValue, sparseValues.dtype))]; } default: throw TypeError(`Node type ${node.op} is not implemented`); diff --git a/tfjs-converter/src/operations/executors/slice_join_executor_test.ts b/tfjs-converter/src/operations/executors/slice_join_executor_test.ts index 6ea6249aab4..7545b251ab5 100644 --- a/tfjs-converter/src/operations/executors/slice_join_executor_test.ts +++ b/tfjs-converter/src/operations/executors/slice_join_executor_test.ts @@ -22,6 +22,7 @@ import * as slice_join from '../op_list/slice_join'; import {Node} from '../types'; import {executeOp} from './slice_join_executor'; +import {RecursiveSpy, spyOnAllFunctions} from './spy_ops'; import {createBooleanArrayAttrFromIndex, createNumberAttr, createNumberAttrFromIndex, createNumericArrayAttrFromIndex, createTensorAttr, createTensorsAttr, validateParam} from './test_helper'; describe('slice join', () => { @@ -32,6 +33,13 @@ describe('slice join', () => { const input4 = [tfOps.tensor1d([3])]; const input5 = [tfOps.tensor1d([3, 4])]; const context = new ExecutionContext({}, {}, {}); + let spyOps: RecursiveSpy; + let spyOpsAsTfOps: typeof tfOps; + + beforeEach(() => { + spyOps = spyOnAllFunctions(tfOps); + spyOpsAsTfOps = spyOps as unknown as typeof tfOps; + }); describe('multi-tensor ops', () => { beforeEach(() => { @@ -48,24 +56,24 @@ describe('slice join', () => { }); describe('executeOp', () => { it('Concat', () => { - const spy = spyOn(tfOps, 'concat'); node.op = 'Concat'; node.inputParams.tensors = createTensorsAttr(1, 0); node.inputParams.axis = createNumberAttrFromIndex(0); node.attrParams.n = createNumberAttr(2); - executeOp(node, {input1, input2, input3}, context); + spyOps.concat.and.returnValue({}); + executeOp(node, {input1, input2, input3}, context, spyOpsAsTfOps); - expect(spy).toHaveBeenCalledWith([input2[0], input3[0]], 1); + expect(spyOps.concat).toHaveBeenCalledWith([input2[0], input3[0]], 1); }); it('Concat when input length and n mismatch', () => { - const spy = spyOn(tfOps, 'concat'); node.op = 'Concat'; node.inputParams.tensors = createTensorsAttr(0, -1); node.inputParams.axis = createNumberAttrFromIndex(-1); node.attrParams.n = createNumberAttr(1); - executeOp(node, {input1, input2, input3}, context); + spyOps.concat.and.returnValue({}); + executeOp(node, {input1, input2, input3}, context, spyOpsAsTfOps); - expect(spy).toHaveBeenCalledWith([input1[0]], 3); + expect(spyOps.concat).toHaveBeenCalledWith([input1[0]], 3); }); it('should match json def for Concat', () => { node.op = 'Concat'; @@ -76,24 +84,24 @@ describe('slice join', () => { expect(validateParam(node, slice_join.json, 'Concat')).toBeTruthy(); }); it('ConcatV2', () => { - const spy = spyOn(tfOps, 'concat'); node.op = 'ConcatV2'; node.inputParams.tensors = createTensorsAttr(0, -1); node.inputParams.axis = createNumberAttrFromIndex(-1); node.attrParams.n = createNumberAttr(2); - executeOp(node, {input1, input2, input3}, context); + spyOps.concat.and.returnValue({}); + executeOp(node, {input1, input2, input3}, context, spyOpsAsTfOps); - expect(spy).toHaveBeenCalledWith([input1[0], input2[0]], 3); + expect(spyOps.concat).toHaveBeenCalledWith([input1[0], input2[0]], 3); }); it('ConcatV2 when input length and n mismatch', () => { - const spy = spyOn(tfOps, 'concat'); node.op = 'ConcatV2'; node.inputParams.tensors = createTensorsAttr(0, -1); node.inputParams.axis = createNumberAttrFromIndex(-1); node.attrParams.n = createNumberAttr(1); - executeOp(node, {input1, input2, input3}, 
context); + spyOps.concat.and.returnValue({}); + executeOp(node, {input1, input2, input3}, context, spyOpsAsTfOps); - expect(spy).toHaveBeenCalledWith([input1[0]], 3); + expect(spyOps.concat).toHaveBeenCalledWith([input1[0]], 3); }); it('should match json def for ConcatV2', () => { node.op = 'ConcatV2'; @@ -104,13 +112,14 @@ describe('slice join', () => { expect(validateParam(node, slice_join.json, 'ConcatV2')).toBeTruthy(); }); it('should call tfOps.unstack', () => { - const spy = spyOn(tfOps, 'unstack'); node.op = 'Unpack'; node.inputParams.tensor = createTensorAttr(0); node.attrParams.axis = createNumberAttr(4); - executeOp(node, {input1}, context); + spyOps.unstack.and.returnValue({}); + + executeOp(node, {input1}, context, spyOpsAsTfOps); - expect(spy).toHaveBeenCalledWith(input1[0], 4); + expect(spyOps.unstack).toHaveBeenCalledWith(input1[0], 4); }); it('should match json def for unstack', () => { node.op = 'Unpack'; @@ -120,16 +129,16 @@ describe('slice join', () => { expect(validateParam(node, slice_join.json)).toBeTruthy(); }); it('should call tfOps.stack', () => { - const spy = spyOn(tfOps, 'stack'); node.op = 'Pack'; node.inputParams.tensors = createTensorsAttr(0, 0); node.attrParams.axis = createNumberAttr(4); - executeOp(node, {input1, input2, input3}, context); + spyOps.stack.and.returnValue({}); + executeOp(node, {input1, input2, input3}, context, spyOpsAsTfOps); - expect(spy.calls.mostRecent().args[0][0]).toEqual(input1[0]); - expect(spy.calls.mostRecent().args[0][1]).toEqual(input2[0]); - expect(spy.calls.mostRecent().args[0][2]).toEqual(input3[0]); - expect(spy.calls.mostRecent().args[1]).toEqual(4); + expect(spyOps.stack.calls.mostRecent().args[0][0]).toEqual(input1[0]); + expect(spyOps.stack.calls.mostRecent().args[0][1]).toEqual(input2[0]); + expect(spyOps.stack.calls.mostRecent().args[0][2]).toEqual(input3[0]); + expect(spyOps.stack.calls.mostRecent().args[1]).toEqual(4); }); it('should match json def for unstack', () => { node.op = 'Pack'; @@ -139,18 +148,19 @@ describe('slice join', () => { expect(validateParam(node, slice_join.json)).toBeTruthy(); }); it('should reshape tensors for tfOps.stack', () => { - const spy = spyOn(tfOps, 'stack'); node.op = 'Pack'; node.inputNames = ['input1', 'input2', 'input3', 'input4']; node.inputParams.tensors = createTensorsAttr(0, 0); node.attrParams.axis = createNumberAttr(4); - executeOp(node, {input1, input2, input3, input4}, context); + spyOps.stack.and.returnValue({}); + executeOp( + node, {input1, input2, input3, input4}, context, spyOpsAsTfOps); - expect(spy.calls.mostRecent().args[0][0]).toEqual(input1[0]); - expect(spy.calls.mostRecent().args[0][1]).toEqual(input2[0]); - expect(spy.calls.mostRecent().args[0][2]).toEqual(input3[0]); - expect(spy.calls.mostRecent().args[0][3].shape).toEqual([]); - expect(spy.calls.mostRecent().args[1]).toEqual(4); + expect(spyOps.stack.calls.mostRecent().args[0][0]).toEqual(input1[0]); + expect(spyOps.stack.calls.mostRecent().args[0][1]).toEqual(input2[0]); + expect(spyOps.stack.calls.mostRecent().args[0][2]).toEqual(input3[0]); + expect(spyOps.stack.calls.mostRecent().args[0][3].shape).toEqual([]); + expect(spyOps.stack.calls.mostRecent().args[1]).toEqual(4); }); it('should raise error if tensors shape does not match for tfOps.stack', () => { @@ -179,14 +189,14 @@ describe('slice join', () => { }); describe('executeOp', () => { it('should call tfOps.reverse', () => { - spyOn(tfOps, 'reverse'); node.op = 'Reverse'; node.inputParams.dims = createBooleanArrayAttrFromIndex(1); node.inputNames = 
['input1', 'input6']; const input6 = [tfOps.tensor1d([false, true], 'bool')]; - executeOp(node, {input1, input6}, context); + spyOps.reverse.and.returnValue({}); + executeOp(node, {input1, input6}, context, spyOpsAsTfOps); - expect(tfOps.reverse).toHaveBeenCalledWith(input1[0], [1]); + expect(spyOps.reverse).toHaveBeenCalledWith(input1[0], [1]); }); it('should match json def for reverse', () => { node.op = 'Reverse'; @@ -195,13 +205,13 @@ describe('slice join', () => { expect(validateParam(node, slice_join.json, 'Reverse')).toBeTruthy(); }); it('should call tfOps.reverse', () => { - spyOn(tfOps, 'reverse'); node.op = 'ReverseV2'; node.inputParams.axis = createNumericArrayAttrFromIndex(1); node.inputNames = ['input1', 'input4']; - executeOp(node, {input1, input4}, context); + spyOps.reverse.and.returnValue({}); + executeOp(node, {input1, input4}, context, spyOpsAsTfOps); - expect(tfOps.reverse).toHaveBeenCalledWith(input1[0], [3]); + expect(spyOps.reverse).toHaveBeenCalledWith(input1[0], [3]); }); it('should match json def for reverse', () => { node.op = 'ReverseV2'; @@ -210,13 +220,13 @@ describe('slice join', () => { expect(validateParam(node, slice_join.json, 'ReverseV2')).toBeTruthy(); }); it('should call tfOps.tile', () => { - spyOn(tfOps, 'tile'); node.op = 'Tile'; node.inputParams.reps = createNumericArrayAttrFromIndex(1); node.inputNames = ['input1', 'input4']; - executeOp(node, {input1, input4}, context); + spyOps.tile.and.returnValue({}); + executeOp(node, {input1, input4}, context, spyOpsAsTfOps); - expect(tfOps.tile).toHaveBeenCalledWith(input1[0], [3]); + expect(spyOps.tile).toHaveBeenCalledWith(input1[0], [3]); }); it('should match json def for tile', () => { node.op = 'Tile'; @@ -225,16 +235,16 @@ describe('slice join', () => { expect(validateParam(node, slice_join.json)).toBeTruthy(); }); it('should call tfOps.slice', () => { - spyOn(tfOps, 'slice'); node.op = 'Slice'; node.inputParams.begin = createNumericArrayAttrFromIndex(1); node.inputParams.size = createNumericArrayAttrFromIndex(2); const input6 = [tfOps.tensor1d([2], 'int32')]; node.inputNames = ['input1', 'input6', 'input4']; + spyOps.slice.and.returnValue({}); - executeOp(node, {input1, input6, input4}, context); + executeOp(node, {input1, input6, input4}, context, spyOpsAsTfOps); - expect(tfOps.slice).toHaveBeenCalledWith(input1[0], [2], [3]); + expect(spyOps.slice).toHaveBeenCalledWith(input1[0], [2], [3]); }); it('should match json def for slice', () => { node.op = 'Slice'; @@ -244,7 +254,6 @@ describe('slice join', () => { expect(validateParam(node, slice_join.json)).toBeTruthy(); }); it('should call tfOps.stridedSlice', () => { - spyOn(tfOps, 'stridedSlice'); node.op = 'StridedSlice'; node.inputParams.begin = createNumericArrayAttrFromIndex(1); node.inputParams.end = createNumericArrayAttrFromIndex(2); @@ -257,9 +266,10 @@ describe('slice join', () => { node.inputNames = ['input1', 'input6', 'input7', 'input4']; const input6 = [tfOps.tensor1d([2], 'int32')]; const input7 = [tfOps.tensor1d([3], 'int32')]; - executeOp(node, {input1, input6, input7, input4}, context); + executeOp( + node, {input1, input6, input7, input4}, context, spyOpsAsTfOps); - expect(tfOps.stridedSlice) + expect(spyOps.stridedSlice) .toHaveBeenCalledWith(input1[0], [2], [3], [3], 4, 5, 1, 2, 3); }); it('should match json def for stridedSlice', () => { @@ -276,14 +286,14 @@ describe('slice join', () => { expect(validateParam(node, slice_join.json)).toBeTruthy(); }); it('should call tfOps.gather', () => { - spyOn(tfOps, 'gather'); node.op = 
'Gather'; node.inputParams.indices = createTensorAttr(1); const input5 = [tfOps.scalar(2, 'int32')]; node.inputNames = ['input1', 'input5']; - executeOp(node, {input1, input5, input3}, context); + spyOps.gather.and.returnValue({}); + executeOp(node, {input1, input5, input3}, context, spyOpsAsTfOps); - expect(tfOps.gather) + expect(spyOps.gather) .toHaveBeenCalledWith( input1[0], jasmine.objectContaining({dataId: input5[0].dataId}), 0); @@ -295,30 +305,30 @@ describe('slice join', () => { expect(validateParam(node, slice_join.json, 'Gather')).toBeTruthy(); }); it('should call tfOps.gather', () => { - spyOn(tfOps, 'gather'); node.op = 'GatherV2'; node.inputParams.indices = createTensorAttr(1); node.inputParams.axis = createNumberAttrFromIndex(2); node.attrParams.batchDims = createNumberAttr(1); const input5 = [tfOps.scalar(2, 'int32')]; node.inputNames = ['input1', 'input5', 'input3']; - executeOp(node, {input1, input5, input3}, context); + spyOps.gather.and.returnValue({}); + executeOp(node, {input1, input5, input3}, context, spyOpsAsTfOps); - expect(tfOps.gather) + expect(spyOps.gather) .toHaveBeenCalledWith( input1[0], jasmine.objectContaining({dataId: input5[0].dataId}), 3, 1); }); it('should make indices param of int32 dtype', () => { - spyOn(tfOps, 'gather'); node.op = 'Gather'; node.inputParams.indices = createTensorAttr(1); node.inputNames = ['input1', 'input5']; const input5 = [tfOps.scalar(2, 'float32')]; - executeOp(node, {input1, input5}, context); + spyOps.gather.and.returnValue({}); + executeOp(node, {input1, input5}, context, spyOpsAsTfOps); - expect(tfOps.gather) + expect(spyOps.gather) .toHaveBeenCalledWith( input1[0], jasmine.objectContaining({dtype: 'int32'}), 0); }); @@ -331,15 +341,15 @@ describe('slice join', () => { expect(validateParam(node, slice_join.json, 'GatherV2')).toBeTruthy(); }); it('should call tfOps.split', () => { - spyOn(tfOps, 'split'); node.op = 'Split'; node.inputParams.axis = createNumberAttrFromIndex(0); node.inputParams.x = createTensorAttr(1); node.attrParams.numOrSizeSplits = createNumberAttr(2); node.inputNames = ['input1', 'input2']; - executeOp(node, {input1, input2}, context); + spyOps.split.and.returnValue({}); + executeOp(node, {input1, input2}, context, spyOpsAsTfOps); - expect(tfOps.split).toHaveBeenCalledWith(input2[0], 2, 1); + expect(spyOps.split).toHaveBeenCalledWith(input2[0], 2, 1); }); it('should match json def for split', () => { node.op = 'Split'; @@ -350,15 +360,15 @@ describe('slice join', () => { expect(validateParam(node, slice_join.json, 'Split')).toBeTruthy(); }); it('should call tfOps.split', () => { - spyOn(tfOps, 'split'); node.op = 'SplitV'; node.inputParams.x = createTensorAttr(0); node.inputParams.numOrSizeSplits = createNumericArrayAttrFromIndex(1); node.inputParams.axis = createNumberAttrFromIndex(2); node.inputNames = ['input1', 'input2', 'input3']; - executeOp(node, {input1, input2, input3}, context); + spyOps.split.and.returnValue({}); + executeOp(node, {input1, input2, input3}, context, spyOpsAsTfOps); - expect(tfOps.split).toHaveBeenCalledWith(input1[0], 2, 3); + expect(spyOps.split).toHaveBeenCalledWith(input1[0], 2, 3); }); it('should match json def for split', () => { node.op = 'SplitV'; @@ -369,15 +379,17 @@ describe('slice join', () => { expect(validateParam(node, slice_join.json, 'SplitV')).toBeTruthy(); }); it('should call tfOps.scatterND', () => { - spyOn(tfOps, 'scatterND'); node.op = 'ScatterNd'; node.inputParams.indices = createTensorAttr(0); node.inputParams.values = createTensorAttr(1); 
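On the test side, every spec in these files now follows the same recipe: build a fresh recursive spy over tfOps in beforeEach, stub the op under test with `and.returnValue({})` (the executor only wraps the result, so an empty stub suffices), pass the spy object through the new `ops` argument, and assert against the spy rather than the real module. A condensed, self-contained sketch of one such spec; the Node fixture fields and import paths are assumptions that mirror the surrounding test files:

```ts
// Assumed to match the tfOps import used by the other test files.
import * as tfOps from '@tensorflow/tfjs-core/dist/ops/ops_for_converter';

import {ExecutionContext} from '../../executor/execution_context';
import {Node} from '../types';

import {executeOp} from './slice_join_executor';
import {RecursiveSpy, spyOnAllFunctions} from './spy_ops';
import {createTensorAttr} from './test_helper';

describe('GatherNd (sketch)', () => {
  const context = new ExecutionContext({}, {}, {});
  const input1 = [tfOps.tensor1d([1, 2, 3])];
  const input2 = [tfOps.tensor1d([1], 'int32')];
  let spyOps: RecursiveSpy<typeof tfOps>;
  let spyOpsAsTfOps: typeof tfOps;

  beforeEach(() => {
    // Fresh spies per spec; calls fall through to the real ops unless stubbed.
    spyOps = spyOnAllFunctions(tfOps);
    spyOpsAsTfOps = spyOps as unknown as typeof tfOps;
  });

  it('should call ops.gatherND', () => {
    const node: Node = {
      name: 'test',
      op: 'GatherNd',
      category: 'slice_join',  // assumed; mirrors the fixtures in this file
      inputNames: ['input1', 'input2'],
      inputs: [],
      inputParams: {x: createTensorAttr(0), indices: createTensorAttr(1)},
      attrParams: {},
      children: []
    };
    // The executor just wraps the result in an array, so an empty stub is fine.
    spyOps.gatherND.and.returnValue({});

    executeOp(node, {input1, input2}, context, spyOpsAsTfOps);

    expect(spyOps.gatherND).toHaveBeenCalledWith(input1[0], input2[0]);
  });
});
```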
node.inputParams.shape = createNumericArrayAttrFromIndex(2); node.inputNames = ['input1', 'input2', 'input4']; - executeOp(node, {input1, input2, input4}, context); + spyOps.scatterND.and.returnValue({}); + executeOp(node, {input1, input2, input4}, context, spyOpsAsTfOps); - expect(tfOps.scatterND).toHaveBeenCalledWith(input1[0], input2[0], [3]); + expect(spyOps.scatterND).toHaveBeenCalledWith(input1[0], input2[0], [ + 3 + ]); }); it('should match json def for scatterND', () => { node.op = 'ScatterNd'; @@ -389,14 +401,14 @@ describe('slice join', () => { expect(validateParam(node, slice_join.json)).toBeTruthy(); }); it('should call tfOps.gatherND', () => { - spyOn(tfOps, 'gatherND'); node.op = 'GatherNd'; node.inputParams.x = createTensorAttr(0); node.inputParams.indices = createTensorAttr(1); node.inputNames = ['input1', 'input2']; - executeOp(node, {input1, input2}, context); + spyOps.gatherND.and.returnValue({}); + executeOp(node, {input1, input2}, context, spyOpsAsTfOps); - expect(tfOps.gatherND).toHaveBeenCalledWith(input1[0], input2[0]); + expect(spyOps.gatherND).toHaveBeenCalledWith(input1[0], input2[0]); }); it('should match json def for gatherND', () => { node.op = 'GatherNd'; @@ -406,7 +418,6 @@ describe('slice join', () => { expect(validateParam(node, slice_join.json)).toBeTruthy(); }); it('should call tfOps.sparseToDense', () => { - spyOn(tfOps, 'sparseToDense'); node.op = 'SparseToDense'; node.inputParams.sparseIndices = createTensorAttr(0); node.inputParams.outputShape = createNumericArrayAttrFromIndex(1); @@ -414,13 +425,14 @@ describe('slice join', () => { node.inputParams.defaultValue = createTensorAttr(3); node.inputParams.indices = createTensorAttr(1); node.inputNames = ['input1', 'input4', 'input3', 'input2']; - executeOp(node, {input1, input2, input3, input4}, context); + spyOps.sparseToDense.and.returnValue({}); + executeOp( + node, {input1, input2, input3, input4}, context, spyOpsAsTfOps); - expect(tfOps.sparseToDense) + expect(spyOps.sparseToDense) .toHaveBeenCalledWith(input1[0], input3[0], [3], input2[0]); }); it('should make defaultValue of same dtype as sparseValues', () => { - spyOn(tfOps, 'sparseToDense'); node.op = 'SparseToDense'; node.inputParams.sparseIndices = createTensorAttr(0); node.inputParams.outputShape = createNumericArrayAttrFromIndex(1); @@ -429,9 +441,11 @@ describe('slice join', () => { node.inputParams.indices = createTensorAttr(1); const input5 = [tfOps.scalar(5, 'int32')]; node.inputNames = ['input1', 'input4', 'input3', 'input5']; - executeOp(node, {input1, input5, input3, input4}, context); + spyOps.sparseToDense.and.returnValue({}); + executeOp( + node, {input1, input5, input3, input4}, context, spyOpsAsTfOps); - expect(tfOps.sparseToDense) + expect(spyOps.sparseToDense) .toHaveBeenCalledWith( input1[0], input3[0], [3], jasmine.objectContaining({dtype: 'float32'})); diff --git a/tfjs-converter/src/operations/executors/sparse_executor.ts b/tfjs-converter/src/operations/executors/sparse_executor.ts index a751af82a7b..3cab1601d70 100644 --- a/tfjs-converter/src/operations/executors/sparse_executor.ts +++ b/tfjs-converter/src/operations/executors/sparse_executor.ts @@ -27,7 +27,7 @@ import {getParamValue} from './utils'; export const executeOp: InternalOpExecutor = (node: Node, tensorMap: NamedTensorsMap, - context: ExecutionContext): Tensor[] => { + context: ExecutionContext, ops = tfOps): Tensor[] => { switch (node.op) { case 'SparseFillEmptyRows': { const { @@ -36,7 +36,7 @@ export const executeOp: InternalOpExecutor = emptyRowIndicator, 
reverseIndexMap } = - tfOps.sparse.sparseFillEmptyRows( + ops.sparse.sparseFillEmptyRows( getParamValue('indices', node, tensorMap, context) as Tensor2D, getParamValue('values', node, tensorMap, context) as Tensor1D, @@ -49,7 +49,7 @@ export const executeOp: InternalOpExecutor = ]; } case 'SparseReshape': { - const {outputIndices, outputShape} = tfOps.sparse.sparseReshape( + const {outputIndices, outputShape} = ops.sparse.sparseReshape( getParamValue('inputIndices', node, tensorMap, context) as Tensor2D, getParamValue('inputShape', node, tensorMap, context) as Tensor1D, @@ -57,7 +57,7 @@ export const executeOp: InternalOpExecutor = return [outputIndices, outputShape]; } case 'SparseSegmentMean': { - const outputData = tfOps.sparse.sparseSegmentMean( + const outputData = ops.sparse.sparseSegmentMean( getParamValue('data', node, tensorMap, context) as Tensor, getParamValue('indices', node, tensorMap, context) as Tensor1D, getParamValue('segmentIds', node, tensorMap, context) as @@ -65,7 +65,7 @@ export const executeOp: InternalOpExecutor = return [outputData]; } case 'SparseSegmentSum': { - const outputData = tfOps.sparse.sparseSegmentSum( + const outputData = ops.sparse.sparseSegmentSum( getParamValue('data', node, tensorMap, context) as Tensor, getParamValue('indices', node, tensorMap, context) as Tensor1D, getParamValue('segmentIds', node, tensorMap, context) as diff --git a/tfjs-converter/src/operations/executors/sparse_executor_test.ts b/tfjs-converter/src/operations/executors/sparse_executor_test.ts index 8d2a3ce376d..f42a3fc07a1 100644 --- a/tfjs-converter/src/operations/executors/sparse_executor_test.ts +++ b/tfjs-converter/src/operations/executors/sparse_executor_test.ts @@ -24,12 +24,17 @@ import {Node} from '../types'; import {executeOp} from './sparse_executor'; import {createTensorAttr, validateParam} from './test_helper'; +import {RecursiveSpy, spyOnAllFunctions} from './spy_ops'; describe('sparse', () => { let node: Node; const context = new ExecutionContext({}, {}, {}); + let spyOps: RecursiveSpy; + let spyOpsAsTfOps: typeof tfOps; beforeEach(() => { + spyOps = spyOnAllFunctions(tfOps); + spyOpsAsTfOps = spyOps as unknown as typeof tfOps; node = { name: 'test', op: '', @@ -45,7 +50,6 @@ describe('sparse', () => { describe('executeOp', () => { describe('SparseFillEmptyRows', () => { it('should call tfOps.sparse.sparseFillEmptyRows', async () => { - spyOn(tfOps.sparse, 'sparseFillEmptyRows').and.callThrough(); node.op = 'SparseFillEmptyRows'; node.inputParams = { indices: createTensorAttr(0), @@ -62,9 +66,9 @@ describe('sparse', () => { const defaultValue = [tfOps.scalar(-1, 'int32')]; const result = executeOp( node, {indices, values, denseShape, defaultValue}, - context) as Tensor[]; + context, spyOpsAsTfOps) as Tensor[]; - expect(tfOps.sparse.sparseFillEmptyRows) + expect(spyOps.sparse.sparseFillEmptyRows) .toHaveBeenCalledWith( indices[0], values[0], denseShape[0], defaultValue[0]); test_util.expectArraysClose( @@ -89,7 +93,6 @@ describe('sparse', () => { }); describe('SparseReshape', () => { it('should call tfOps.sparse.sparseReshape', async () => { - spyOn(tfOps.sparse, 'sparseReshape').and.callThrough(); node.op = 'SparseReshape'; node.inputParams = { inputIndices: createTensorAttr(0), @@ -103,10 +106,10 @@ describe('sparse', () => { const inputShape = [tfOps.tensor1d([2, 3, 6], 'int32')]; const newShape = [tfOps.tensor1d([9, -1], 'int32')]; const result = - executeOp(node, {inputIndices, inputShape, newShape}, context) as - Tensor[]; + executeOp(node, {inputIndices, 
inputShape, newShape}, context, + spyOpsAsTfOps) as Tensor[]; - expect(tfOps.sparse.sparseReshape) + expect(spyOps.sparse.sparseReshape) .toHaveBeenCalledWith(inputIndices[0], inputShape[0], newShape[0]); test_util.expectArraysClose( await result[0].data(), [0, 0, 0, 1, 1, 2, 4, 2, 8, 1]); @@ -126,7 +129,6 @@ describe('sparse', () => { }); describe('SparseSegmentMean', () => { it('should call tfOps.sparse.sparseSegmentMean', async () => { - spyOn(tfOps.sparse, 'sparseSegmentMean').and.callThrough(); node.op = 'SparseSegmentMean'; node.inputParams = { data: createTensorAttr(0), @@ -140,9 +142,10 @@ describe('sparse', () => { const indices = [tfOps.tensor1d([0, 1, 2], 'int32')]; const segmentIds = [tfOps.tensor1d([0, 1, 1], 'int32')]; const result = - executeOp(node, {data, indices, segmentIds}, context) as Tensor[]; + executeOp(node, {data, indices, segmentIds}, context, + spyOpsAsTfOps) as Tensor[]; - expect(tfOps.sparse.sparseSegmentMean) + expect(spyOps.sparse.sparseSegmentMean) .toHaveBeenCalledWith(data[0], indices[0], segmentIds[0]); test_util.expectArraysClose( await result[0].data(), [1.0, 2.0, 3.0, 4.0, 2.5, 2.5, 2.5, 2.5]); @@ -160,7 +163,6 @@ describe('sparse', () => { }); describe('SparseSegmentSum', () => { it('should call tfOps.sparse.sparseSegmentSum', async () => { - spyOn(tfOps.sparse, 'sparseSegmentSum').and.callThrough(); node.op = 'SparseSegmentSum'; node.inputParams = { data: createTensorAttr(0), @@ -174,9 +176,10 @@ describe('sparse', () => { const indices = [tfOps.tensor1d([0, 1], 'int32')]; const segmentIds = [tfOps.tensor1d([0, 0], 'int32')]; const result = - executeOp(node, {data, indices, segmentIds}, context) as Tensor[]; + executeOp(node, {data, indices, segmentIds}, context, + spyOpsAsTfOps) as Tensor[]; - expect(tfOps.sparse.sparseSegmentSum) + expect(spyOps.sparse.sparseSegmentSum) .toHaveBeenCalledWith(data[0], indices[0], segmentIds[0]); test_util.expectArraysClose(await result[0].data(), [0, 0, 0, 0]); }); diff --git a/tfjs-converter/src/operations/executors/spectral_executor.ts b/tfjs-converter/src/operations/executors/spectral_executor.ts index c0f169772dc..9a1f49b93f4 100644 --- a/tfjs-converter/src/operations/executors/spectral_executor.ts +++ b/tfjs-converter/src/operations/executors/spectral_executor.ts @@ -26,23 +26,23 @@ import {InternalOpExecutor, Node} from '../types'; import {getParamValue} from './utils'; export const executeOp: InternalOpExecutor = - (node: Node, tensorMap: NamedTensorsMap, context: ExecutionContext): - Tensor[] => { + (node: Node, tensorMap: NamedTensorsMap, context: ExecutionContext, + ops = tfOps): Tensor[] => { switch (node.op) { case 'FFT': { - return [tfOps.fft( + return [ops.fft( getParamValue('x', node, tensorMap, context) as Tensor)]; } case 'IFFT': { - return [tfOps.ifft( + return [ops.ifft( getParamValue('x', node, tensorMap, context) as Tensor)]; } case 'RFFT': { - return [tfOps.rfft( + return [ops.rfft( getParamValue('x', node, tensorMap, context) as Tensor)]; } case 'IRFFT': { - return [tfOps.irfft( + return [ops.irfft( getParamValue('x', node, tensorMap, context) as Tensor)]; } default: diff --git a/tfjs-converter/src/operations/executors/spectral_executor_test.ts b/tfjs-converter/src/operations/executors/spectral_executor_test.ts index c9f5dccc0eb..5e4d0a7947e 100644 --- a/tfjs-converter/src/operations/executors/spectral_executor_test.ts +++ b/tfjs-converter/src/operations/executors/spectral_executor_test.ts @@ -23,13 +23,18 @@ import {Node} from '../types'; import {executeOp} from './spectral_executor'; import 
{createTensorAttr, validateParam} from './test_helper'; +import {RecursiveSpy, spyOnAllFunctions} from './spy_ops'; describe('spectral', () => { let node: Node; const input1 = [tfOps.scalar(1)]; const context = new ExecutionContext({}, {}, {}); + let spyOps: RecursiveSpy; + let spyOpsAsTfOps: typeof tfOps; beforeEach(() => { + spyOps = spyOnAllFunctions(tfOps); + spyOpsAsTfOps = spyOps as unknown as typeof tfOps; node = { name: 'test', op: '', @@ -45,11 +50,11 @@ describe('spectral', () => { describe('executeOp', () => { describe('FFT', () => { it('should call tfOps.fft', () => { - spyOn(tfOps, 'fft'); node.op = 'FFT'; - executeOp(node, {input1}, context); + spyOps.fft.and.returnValue({}); + executeOp(node, {input1}, context, spyOpsAsTfOps); - expect(tfOps.fft).toHaveBeenCalledWith(input1[0]); + expect(spyOps.fft).toHaveBeenCalledWith(input1[0]); }); it('should match json def', () => { node.op = 'FFT'; @@ -59,11 +64,11 @@ describe('spectral', () => { }); describe('IFFT', () => { it('should call tfOps.ifft', () => { - spyOn(tfOps, 'ifft'); node.op = 'IFFT'; - executeOp(node, {input1}, context); + spyOps.ifft.and.returnValue({}); + executeOp(node, {input1}, context, spyOpsAsTfOps); - expect(tfOps.ifft).toHaveBeenCalledWith(input1[0]); + expect(spyOps.ifft).toHaveBeenCalledWith(input1[0]); }); it('should match json def', () => { node.op = 'IFFT'; @@ -73,11 +78,11 @@ describe('spectral', () => { }); describe('RFFT', () => { it('should call tfOps.rfft', () => { - spyOn(tfOps, 'rfft'); node.op = 'RFFT'; - executeOp(node, {input1}, context); + spyOps.rfft.and.returnValue({}); + executeOp(node, {input1}, context, spyOpsAsTfOps); - expect(tfOps.rfft).toHaveBeenCalledWith(input1[0]); + expect(spyOps.rfft).toHaveBeenCalledWith(input1[0]); }); it('should match json def', () => { node.op = 'RFFT'; @@ -87,11 +92,11 @@ describe('spectral', () => { }); describe('IRFFT', () => { it('should call tfOps.irfft', () => { - spyOn(tfOps, 'irfft'); node.op = 'IRFFT'; - executeOp(node, {input1}, context); + spyOps.irfft.and.returnValue({}); + executeOp(node, {input1}, context, spyOpsAsTfOps); - expect(tfOps.irfft).toHaveBeenCalledWith(input1[0]); + expect(spyOps.irfft).toHaveBeenCalledWith(input1[0]); }); it('should match json def', () => { node.op = 'IRFFT'; diff --git a/tfjs-converter/src/operations/executors/spy_ops.ts b/tfjs-converter/src/operations/executors/spy_ops.ts new file mode 100644 index 00000000000..12f772533a7 --- /dev/null +++ b/tfjs-converter/src/operations/executors/spy_ops.ts @@ -0,0 +1,17 @@ +export type RecursiveSpy = T extends Function ? 
jasmine.Spy : { + [K in keyof T]: RecursiveSpy +}; + +export function spyOnAllFunctions(obj: T): RecursiveSpy { + return Object.fromEntries( + Object.entries(obj).map(([key, val]) => { + if (val instanceof Function) { + return [key, jasmine.createSpy(`${key} spy`, val).and.callThrough()]; + } else if (val instanceof Array) { + return [key, val]; + } else if (val instanceof Object) { + return [key, spyOnAllFunctions(val)]; + } + return [key, val]; + })) as RecursiveSpy; +} diff --git a/tfjs-converter/src/operations/executors/string_executor.ts b/tfjs-converter/src/operations/executors/string_executor.ts index 8fa5904c9f2..e57e2107047 100644 --- a/tfjs-converter/src/operations/executors/string_executor.ts +++ b/tfjs-converter/src/operations/executors/string_executor.ts @@ -27,10 +27,10 @@ import {getParamValue} from './utils'; export const executeOp: InternalOpExecutor = (node: Node, tensorMap: NamedTensorsMap, - context: ExecutionContext): Tensor[] => { + context: ExecutionContext, ops = tfOps): Tensor[] => { switch (node.op) { case 'StringNGrams': { - const {nGrams, nGramsSplits} = tfOps.string.stringNGrams( + const {nGrams, nGramsSplits} = ops.string.stringNGrams( getParamValue('data', node, tensorMap, context) as Tensor1D, getParamValue('dataSplits', node, tensorMap, context) as Tensor, getParamValue('separator', node, tensorMap, context) as string, @@ -45,14 +45,14 @@ export const executeOp: InternalOpExecutor = return [nGrams, nGramsSplits]; } case 'StringSplit': { - const {indices, values, shape} = tfOps.string.stringSplit( + const {indices, values, shape} = ops.string.stringSplit( getParamValue('input', node, tensorMap, context) as Tensor1D, getParamValue('delimiter', node, tensorMap, context) as Scalar, getParamValue('skipEmpty', node, tensorMap, context) as boolean); return [indices, values, shape]; } case 'StringToHashBucketFast': { - const output = tfOps.string.stringToHashBucketFast( + const output = ops.string.stringToHashBucketFast( getParamValue('input', node, tensorMap, context) as Tensor, getParamValue('numBuckets', node, tensorMap, context) as number); return [output]; diff --git a/tfjs-converter/src/operations/executors/string_executor_test.ts b/tfjs-converter/src/operations/executors/string_executor_test.ts index 5378216f19e..492d0165953 100644 --- a/tfjs-converter/src/operations/executors/string_executor_test.ts +++ b/tfjs-converter/src/operations/executors/string_executor_test.ts @@ -24,12 +24,17 @@ import {Node} from '../types'; import {executeOp} from './string_executor'; import {createBoolAttr, createNumberAttr, createNumericArrayAttr, createStrAttr, createTensorAttr, validateParam} from './test_helper'; +import {RecursiveSpy, spyOnAllFunctions} from './spy_ops'; describe('string', () => { let node: Node; const context = new ExecutionContext({}, {}, {}); + let spyOps: RecursiveSpy; + let spyOpsAsTfOps: typeof tfOps; beforeEach(() => { + spyOps = spyOnAllFunctions(tfOps); + spyOpsAsTfOps = spyOps as unknown as typeof tfOps; node = { name: 'test', op: '', @@ -46,7 +51,6 @@ describe('string', () => { describe('executeOp', () => { describe('StringNGrams', () => { it('should call tfOps.string.stringNGrams', async () => { - spyOn(tfOps.string, 'stringNGrams').and.callThrough(); node.op = 'StringNGrams'; node.inputParams = { data: createTensorAttr(0), @@ -65,9 +69,10 @@ describe('string', () => { const data = [tfOps.tensor1d(['a', 'b', 'c', 'd', 'e', 'f'], 'string')]; const dataSplits = [tfOps.tensor1d([0, 4, 6], 'int32')]; - const result = executeOp(node, {data, 
dataSplits}, context) as Tensor[]; + const result = executeOp(node, {data, dataSplits}, context, + spyOpsAsTfOps) as Tensor[]; - expect(tfOps.string.stringNGrams) + expect(spyOps.string.stringNGrams) .toHaveBeenCalledWith( data[0], dataSplits[0], '|', [3], 'LP', 'RP', -1, false); test_util.expectArraysEqual(await result[0].data(), [ @@ -88,7 +93,6 @@ describe('string', () => { }); describe('StringSplit', () => { it('should call tfOps.string.stringSplit', async () => { - spyOn(tfOps.string, 'stringSplit').and.callThrough(); node.op = 'StringSplit'; node.inputParams = { input: createTensorAttr(0), @@ -100,9 +104,10 @@ describe('string', () => { const input = [tfOps.tensor1d(['#a', 'b#', '#c#'], 'string')]; const delimiter = [tfOps.scalar('#', 'string')]; - const result = executeOp(node, {input, delimiter}, context) as Tensor[]; + const result = executeOp(node, {input, delimiter}, context, + spyOpsAsTfOps) as Tensor[]; - expect(tfOps.string.stringSplit) + expect(spyOps.string.stringSplit) .toHaveBeenCalledWith(input[0], delimiter[0], false); test_util.expectArraysEqual( await result[0].data(), [0, 0, 0, 1, 1, 0, 1, 1, 2, 0, 2, 1, 2, 2]); @@ -123,16 +128,16 @@ describe('string', () => { }); describe('StringToHashBucketFast', () => { it('should call tfOps.string.stringToHashBucketFast', async () => { - spyOn(tfOps.string, 'stringToHashBucketFast').and.callThrough(); node.op = 'StringToHashBucketFast'; node.inputParams = {input: createTensorAttr(0)}; node.attrParams = {numBuckets: createNumberAttr(10)}; node.inputNames = ['input']; const input = [tfOps.tensor1d(['a', 'b', 'c', 'd'], 'string')]; - const result = executeOp(node, {input}, context) as Tensor[]; + const result = executeOp(node, {input}, context, + spyOpsAsTfOps) as Tensor[]; - expect(tfOps.string.stringToHashBucketFast) + expect(spyOps.string.stringToHashBucketFast) .toHaveBeenCalledWith(input[0], 10); test_util.expectArraysClose(await result[0].data(), [9, 2, 2, 5]); }); diff --git a/tfjs-converter/src/operations/executors/test_helper.ts b/tfjs-converter/src/operations/executors/test_helper.ts index f4153e1d12c..25a004fcc5e 100644 --- a/tfjs-converter/src/operations/executors/test_helper.ts +++ b/tfjs-converter/src/operations/executors/test_helper.ts @@ -14,6 +14,7 @@ * limitations under the License. 
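The new spy_ops.ts helper introduced above is what makes this injection pattern work: it walks the tfOps namespace and replaces every function, including those in nested namespaces such as tfOps.sparse and tfOps.string, with a Jasmine spy that calls through to the real op until a test stubs it. The angle-bracketed generic arguments do not survive in the diff text as shown (RecursiveSpy appears without its type parameter, as do spyOnAllFunctions and the uncapitalize helper added to test_helper.ts just below), so the following is a reconstructed sketch with plausible type parameters restored; the exact constraints are assumptions:

```ts
// spy_ops.ts (reconstruction; the exact generic constraints are assumptions).
export type RecursiveSpy<T> =
    T extends Function ? jasmine.Spy : {[K in keyof T]: RecursiveSpy<T[K]>};

export function spyOnAllFunctions<T extends object>(obj: T): RecursiveSpy<T> {
  return Object.fromEntries(Object.entries(obj).map(([key, val]) => {
    if (val instanceof Function) {
      // Spy wrapper that still calls the real op unless a test stubs it.
      return [key, jasmine.createSpy(`${key} spy`, val).and.callThrough()];
    } else if (val instanceof Array) {
      return [key, val];
    } else if (val instanceof Object) {
      // Recurse into nested namespaces (e.g. tfOps.sparse, tfOps.string).
      return [key, spyOnAllFunctions(val)];
    }
    return [key, val];
  })) as RecursiveSpy<T>;
}

// test_helper.ts addition (reconstruction): maps 'ArgMax' -> 'argMax' so the
// reduction tests can index spyOps by op name. Relies on the Uncapitalize
// intrinsic string type available since TypeScript 4.1.
export function uncapitalize<Name extends string>(name: Name):
    Uncapitalize<Name> {
  return name.charAt(0).toLowerCase() + name.slice(1) as Uncapitalize<Name>;
}
```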
* ============================================================================= */ + import {InputParamValue, OpMapper, ParamValue} from '../types'; import {Node} from '../types'; @@ -94,3 +95,7 @@ export function validateParam( } return matched; } + +export function uncapitalize(name: Name): Uncapitalize { + return name.charAt(0).toLowerCase() + name.slice(1) as Uncapitalize; +} diff --git a/tfjs-converter/src/operations/executors/transformation_executor.ts b/tfjs-converter/src/operations/executors/transformation_executor.ts index 0b0083bf062..80833468e3d 100644 --- a/tfjs-converter/src/operations/executors/transformation_executor.ts +++ b/tfjs-converter/src/operations/executors/transformation_executor.ts @@ -27,10 +27,10 @@ import {getParamValue} from './utils'; export const executeOp: InternalOpExecutor = (node: Node, tensorMap: NamedTensorsMap, - context: ExecutionContext): Tensor[] => { + context: ExecutionContext, ops = tfOps): Tensor[] => { switch (node.op) { case 'Cast': { - return [tfOps.cast( + return [ops.cast( getParamValue('x', node, tensorMap, context) as Tensor, getParamValue('dtype', node, tensorMap, context) as 'int32' | 'float32' | 'bool')]; @@ -38,23 +38,23 @@ export const executeOp: InternalOpExecutor = case 'ExpandDims': { const axis = getParamValue('axis', node, tensorMap, context) as number; - return [tfOps.expandDims( + return [ops.expandDims( getParamValue('x', node, tensorMap, context) as Tensor, axis)]; } case 'Squeeze': { const axis = getParamValue('axis', node, tensorMap, context) as number[]; - return [tfOps.squeeze( + return [ops.squeeze( getParamValue('x', node, tensorMap, context) as Tensor, axis)]; } case 'Reshape': { - return [tfOps.reshape( + return [ops.reshape( getParamValue('x', node, tensorMap, context) as Tensor, getParamValue('shape', node, tensorMap, context) as number[])]; } case 'MirrorPad': { - return [tfOps.mirrorPad( + return [ops.mirrorPad( getParamValue('x', node, tensorMap, context) as Tensor, getParamValue('padding', node, tensorMap, context) as Array<[number, number]>, @@ -63,7 +63,7 @@ export const executeOp: InternalOpExecutor = } case 'PadV2': case 'Pad': { - return [tfOps.pad( + return [ops.pad( getParamValue('x', node, tensorMap, context) as Tensor, getParamValue('padding', node, tensorMap, context) as Array<[number, number]>, @@ -75,7 +75,7 @@ export const executeOp: InternalOpExecutor = getParamValue('blockShape', node, tensorMap, context) as number[]; const paddings = getParamValue('paddings', node, tensorMap, context) as number[][]; - return [tfOps.spaceToBatchND( + return [ops.spaceToBatchND( getParamValue('x', node, tensorMap, context) as Tensor, blockShape, paddings)]; } @@ -84,7 +84,7 @@ export const executeOp: InternalOpExecutor = getParamValue('blockShape', node, tensorMap, context) as number[]; const crops = getParamValue('crops', node, tensorMap, context) as number[][]; - return [tfOps.batchToSpaceND( + return [ops.batchToSpaceND( getParamValue('x', node, tensorMap, context) as Tensor, blockShape, crops)]; } @@ -95,17 +95,17 @@ export const executeOp: InternalOpExecutor = (getParamValue('dataFormat', node, tensorMap, context) as string).toUpperCase() as 'NHWC' | 'NCHW'; - return [tfOps.depthToSpace( + return [ops.depthToSpace( getParamValue('x', node, tensorMap, context) as Tensor4D, blockSize, dataFormat)]; } case 'BroadcastTo': { - return [tfOps.broadcastTo( + return [ops.broadcastTo( getParamValue('x', node, tensorMap, context) as Tensor, getParamValue('shape', node, tensorMap, context) as number[])]; } case 
'BroadcastArgs': { - return [tfOps.broadcastArgs( + return [ops.broadcastArgs( getParamValue('s0', node, tensorMap, context) as Tensor, getParamValue('s1', node, tensorMap, context) as Tensor)]; } diff --git a/tfjs-converter/src/operations/executors/transformation_executor_test.ts b/tfjs-converter/src/operations/executors/transformation_executor_test.ts index 3ff0fe67006..94e658f9cf0 100644 --- a/tfjs-converter/src/operations/executors/transformation_executor_test.ts +++ b/tfjs-converter/src/operations/executors/transformation_executor_test.ts @@ -21,6 +21,7 @@ import {ExecutionContext} from '../../executor/execution_context'; import {Node} from '../types'; import {createDtypeAttr, createNumberAttr, createNumericArrayAttrFromIndex, createStrAttr, createTensorAttr} from './test_helper'; import {executeOp} from './transformation_executor'; +import {RecursiveSpy, spyOnAllFunctions} from './spy_ops'; describe('transformation', () => { let node: Node; @@ -42,156 +43,161 @@ describe('transformation', () => { }); describe('executeOp', () => { + let spyOps: RecursiveSpy; + let spyOpsAsTfOps: typeof tfOps; + + beforeEach(() => { + spyOps = spyOnAllFunctions(tfOps); + spyOpsAsTfOps = spyOps as unknown as typeof tfOps; + }); + describe('Cast', () => { it('should call tfOps.cast', () => { - spyOn(tfOps, 'cast'); node.op = 'Cast'; node.attrParams.dtype = createDtypeAttr('float32'); - executeOp(node, {input1}, context); + executeOp(node, {input1}, context, spyOpsAsTfOps); - expect(tfOps.cast).toHaveBeenCalledWith(input1[0], 'float32'); + expect(spyOps.cast).toHaveBeenCalledWith(input1[0], 'float32'); }); }); - describe('expandDExpandDimsims', () => { + describe('ExpandDims', () => { it('should call tfOps.expandDims', () => { - spyOn(tfOps, 'expandDims'); node.op = 'ExpandDims'; node.attrParams.axis = createNumberAttr(1); - executeOp(node, {input1}, context); + spyOps.expandDims.and.returnValue({}); + executeOp(node, {input1}, context, spyOpsAsTfOps); - expect(tfOps.expandDims).toHaveBeenCalledWith(input1[0], 1); + expect(spyOps.expandDims).toHaveBeenCalledWith(input1[0], 1); }); }); describe('MirrorPad', () => { it('should call tfc.mirrorPad', () => { - spyOn(tfOps, 'mirrorPad'); node.op = 'MirrorPad'; node.inputParams.padding = createNumericArrayAttrFromIndex(1); node.attrParams.mode = createStrAttr('reflect'); node.inputNames = ['input1', 'input3']; const input3 = [tfOps.tensor2d([1, 1, 2, 2], [2, 2])]; - executeOp(node, {input1, input3}, context); + spyOps.mirrorPad.and.returnValue({}); + executeOp(node, {input1, input3}, context, spyOpsAsTfOps); - expect(tfOps.mirrorPad) + expect(spyOps.mirrorPad) .toHaveBeenCalledWith(input1[0], [[1, 1], [2, 2]], 'reflect'); }); }); describe('Pad', () => { it('should call tfOps.pad', () => { - spyOn(tfOps, 'pad'); node.op = 'Pad'; node.inputParams.padding = createNumericArrayAttrFromIndex(1); node.attrParams.constantValue = createNumberAttr(1); node.inputNames = ['input1', 'input3']; const input3 = [tfOps.tensor2d([1, 1, 2, 2], [2, 2])]; - executeOp(node, {input1, input3}, context); + spyOps.pad.and.returnValue({}); + executeOp(node, {input1, input3}, context, spyOpsAsTfOps); - expect(tfOps.pad).toHaveBeenCalledWith(input1[0], [[1, 1], [2, 2]], 1); + expect(spyOps.pad).toHaveBeenCalledWith(input1[0], [[1, 1], [2, 2]], 1); }); }); describe('PadV2', () => { it('should call tfOps.pad', () => { - spyOn(tfOps, 'pad'); node.op = 'PadV2'; node.inputParams.padding = createNumericArrayAttrFromIndex(1); node.attrParams.constantValue = createNumberAttr(1); node.inputNames = 
['input1', 'input3']; const input3 = [tfOps.tensor2d([1, 1, 2, 2], [2, 2])]; - executeOp(node, {input1, input3}, context); + spyOps.pad.and.returnValue({}); + executeOp(node, {input1, input3}, context, spyOpsAsTfOps); - expect(tfOps.pad).toHaveBeenCalledWith(input1[0], [[1, 1], [2, 2]], 1); + expect(spyOps.pad).toHaveBeenCalledWith(input1[0], [[1, 1], [2, 2]], 1); }); }); describe('Reshape', () => { it('should call tfOps.reshape', () => { - spyOn(tfOps, 'reshape'); node.op = 'Reshape'; node.inputParams.shape = createNumericArrayAttrFromIndex(1); node.inputNames = ['input1', 'input2']; - executeOp(node, {input1, input2}, context); + executeOp(node, {input1, input2}, context, spyOpsAsTfOps); - expect(tfOps.reshape).toHaveBeenCalledWith(input1[0], [1, 1]); + expect(spyOps.reshape).toHaveBeenCalledWith(input1[0], [1, 1]); }); }); describe('Squeeze', () => { it('should call tfOps.squeeze', () => { - spyOn(tfOps, 'squeeze'); node.op = 'Squeeze'; node.attrParams.axis = createNumberAttr(1); - executeOp(node, {input1}, context); + spyOps.squeeze.and.returnValue({}); + executeOp(node, {input1}, context, spyOpsAsTfOps); - expect(tfOps.squeeze).toHaveBeenCalledWith(input1[0], 1); + expect(spyOps.squeeze).toHaveBeenCalledWith(input1[0], 1); }); }); describe('SpaceToBatchND', () => { it('should call tfOps.spaceToBatchND', () => { - spyOn(tfOps, 'spaceToBatchND'); node.op = 'SpaceToBatchND'; node.inputParams.blockShape = createNumericArrayAttrFromIndex(1); node.inputParams.paddings = createNumericArrayAttrFromIndex(2); node.inputNames = ['input1', 'input2', 'input3']; const input2 = [tfOps.tensor1d([1, 1, 2, 2])]; const input3 = [tfOps.tensor2d([1, 2, 2, 3, 2, 3, 3, 4], [4, 2])]; - executeOp(node, {input1, input2, input3}, context); + spyOps.spaceToBatchND.and.returnValue({}); + executeOp(node, {input1, input2, input3}, context, spyOpsAsTfOps); - expect(tfOps.spaceToBatchND) + expect(spyOps.spaceToBatchND) .toHaveBeenCalledWith( input1[0], [1, 1, 2, 2], [[1, 2], [2, 3], [2, 3], [3, 4]]); }); }); describe('BatchToSpaceND', () => { it('should call tfOps.batchToSpaceND', () => { - spyOn(tfOps, 'batchToSpaceND'); node.op = 'BatchToSpaceND'; node.inputParams.blockShape = createNumericArrayAttrFromIndex(1); node.inputParams.crops = createNumericArrayAttrFromIndex(2); node.inputNames = ['input1', 'input2', 'input3']; const input2 = [tfOps.tensor1d([1, 1, 2, 2])]; const input3 = [tfOps.tensor2d([1, 2, 2, 3, 2, 3, 3, 4], [4, 2])]; - executeOp(node, {input1, input2, input3}, context); + spyOps.batchToSpaceND.and.returnValue({}); + executeOp(node, {input1, input2, input3}, context, spyOpsAsTfOps); - expect(tfOps.batchToSpaceND) + expect(spyOps.batchToSpaceND) .toHaveBeenCalledWith( input1[0], [1, 1, 2, 2], [[1, 2], [2, 3], [2, 3], [3, 4]]); }); }); describe('DepthToSpace', () => { it('should call tfOps.depthToSpace', () => { - spyOn(tfOps, 'depthToSpace'); node.op = 'DepthToSpace'; node.attrParams.blockSize = createNumberAttr(1); node.attrParams.dataFormat = createStrAttr('nhwc'); node.inputNames = ['input1']; - executeOp(node, {input1}, context); + spyOps.depthToSpace.and.returnValue({}); + executeOp(node, {input1}, context, spyOpsAsTfOps); - expect(tfOps.depthToSpace).toHaveBeenCalledWith(input1[0], 1, 'NHWC'); + expect(spyOps.depthToSpace).toHaveBeenCalledWith(input1[0], 1, 'NHWC'); }); }); describe('BroadcastTo', () => { it('should call tfOps.broadcastTo', () => { - spyOn(tfOps, 'broadcastTo'); node.op = 'BroadcastTo'; node.inputParams.shape = createNumericArrayAttrFromIndex(1); node.inputNames = ['input1', 
'input2']; - executeOp(node, {input1, input2}, context); + executeOp(node, {input1, input2}, context, spyOpsAsTfOps); - expect(tfOps.broadcastTo).toHaveBeenCalledWith(input1[0], [1, 1]); + expect(spyOps.broadcastTo).toHaveBeenCalledWith(input1[0], [1, 1]); }); }); describe('BroadcastArgs', () => { it('should call tfOps.broadcastArgs', () => { - spyOn(tfOps, 'broadcastArgs'); node.op = 'BroadcastArgs'; node.inputParams.s0 = createTensorAttr(0); node.inputParams.s1 = createTensorAttr(1); node.inputNames = ['input1', 'input2']; const input1 = [tfOps.tensor1d([1, 1])]; const input2 = [tfOps.tensor1d([1, 1])]; - executeOp(node, {input1, input2}, context); + spyOps.broadcastArgs.and.returnValue({}); + executeOp(node, {input1, input2}, context, spyOpsAsTfOps); - expect(tfOps.broadcastArgs).toHaveBeenCalledWith(input1[0], input2[0]); + expect(spyOps.broadcastArgs).toHaveBeenCalledWith(input1[0], input2[0]); }); }); }); diff --git a/tfjs-converter/src/operations/operation_executor.ts b/tfjs-converter/src/operations/operation_executor.ts index 6360d8d8d55..a2151ec4a1b 100644 --- a/tfjs-converter/src/operations/operation_executor.ts +++ b/tfjs-converter/src/operations/operation_executor.ts @@ -53,53 +53,48 @@ import {Node} from './types'; */ export function executeOp( node: Node, tensorMap: NamedTensorsMap, context: ExecutionContext, - resourceManager?: ResourceManager): tfc.Tensor[]|Promise<tfc.Tensor[]> { + resourceManager?: ResourceManager, tidy = tfc.tidy): tfc.Tensor[]| + Promise<tfc.Tensor[]> { const value = ((node: Node, tensorMap: NamedTensorsMap, context: ExecutionContext) => { switch (node.category) { case 'arithmetic': - return tfc.tidy( - () => arithmetic.executeOp(node, tensorMap, context)); + return tidy(() => arithmetic.executeOp(node, tensorMap, context)); case 'basic_math': - return tfc.tidy( - () => basicMath.executeOp(node, tensorMap, context)); + return tidy(() => basicMath.executeOp(node, tensorMap, context)); case 'control': return control.executeOp(node, tensorMap, context); case 'convolution': - return tfc.tidy( - () => convolution.executeOp(node, tensorMap, context)); + return tidy(() => convolution.executeOp(node, tensorMap, context)); case 'creation': - return tfc.tidy(() => creation.executeOp(node, tensorMap, context)); + return tidy(() => creation.executeOp(node, tensorMap, context)); case 'dynamic': return dynamic.executeOp(node, tensorMap, context); case 'evaluation': - return tfc.tidy( - () => evaluation.executeOp(node, tensorMap, context)); + return tidy(() => evaluation.executeOp(node, tensorMap, context)); case 'image': - return tfc.tidy(() => image.executeOp(node, tensorMap, context)); + return tidy(() => image.executeOp(node, tensorMap, context)); case 'graph': - return tfc.tidy(() => graph.executeOp(node, tensorMap, context)); + return tidy(() => graph.executeOp(node, tensorMap, context)); case 'logical': - return tfc.tidy(() => logical.executeOp(node, tensorMap, context)); + return tidy(() => logical.executeOp(node, tensorMap, context)); case 'matrices': - return tfc.tidy(() => matrices.executeOp(node, tensorMap, context)); + return tidy(() => matrices.executeOp(node, tensorMap, context)); case 'normalization': - return tfc.tidy( + return tidy( () => normalization.executeOp(node, tensorMap, context)); case 'reduction': - return tfc.tidy( - () => reduction.executeOp(node, tensorMap, context)); + return tidy(() => reduction.executeOp(node, tensorMap, context)); case 'slice_join': - return tfc.tidy( - () => sliceJoin.executeOp(node, tensorMap, context)); + return tidy(() =>
sliceJoin.executeOp(node, tensorMap, context)); case 'sparse': - return tfc.tidy(() => sparse.executeOp(node, tensorMap, context)); + return tidy(() => sparse.executeOp(node, tensorMap, context)); case 'spectral': - return tfc.tidy(() => spectral.executeOp(node, tensorMap, context)); + return tidy(() => spectral.executeOp(node, tensorMap, context)); case 'string': - return tfc.tidy(() => string.executeOp(node, tensorMap, context)); + return tidy(() => string.executeOp(node, tensorMap, context)); case 'transformation': - return tfc.tidy( + return tidy( () => transformation.executeOp(node, tensorMap, context)); case 'hash_table': return hashTable.executeOp( @@ -120,7 +115,7 @@ export function executeOp( } })(node, tensorMap, context); if (tfc.util.isPromise(value)) { - return (value as Promise).then((data) => [].concat(data)); + return value.then((data) => [].concat(data)); } return [].concat(value); } diff --git a/tfjs-converter/src/operations/operation_executor_test.ts b/tfjs-converter/src/operations/operation_executor_test.ts index 6617b267129..e826cf02b89 100644 --- a/tfjs-converter/src/operations/operation_executor_test.ts +++ b/tfjs-converter/src/operations/operation_executor_test.ts @@ -78,10 +78,11 @@ describe('OperationExecutor', () => { string, transformation] .forEach(category => { it('should call tidy around executor', () => { - spyOn(tfc, 'tidy'); + const tidySpy = jasmine.createSpy('tidy spy', tfc.tidy); + node.category = category.CATEGORY; - executeOp(node, {}, context); - expect(tfc.tidy).toHaveBeenCalled(); + executeOp(node, {}, context, undefined, tidySpy); + expect(tidySpy).toHaveBeenCalled(); }); }); diff --git a/tfjs-converter/src/operations/types.ts b/tfjs-converter/src/operations/types.ts index 04508504180..5a033e065d5 100644 --- a/tfjs-converter/src/operations/types.ts +++ b/tfjs-converter/src/operations/types.ts @@ -15,6 +15,8 @@ * ============================================================================= */ import {Tensor} from '@tensorflow/tfjs-core'; +// tslint:disable-next-line:no-imports-from-dist +import * as tfOps from '@tensorflow/tfjs-core/dist/ops/ops_for_converter'; import * as tensorflow from '../data/compiled_api'; import {NamedTensorsMap} from '../data/types'; @@ -75,13 +77,13 @@ export declare interface AttrParamMapper extends ParamMapper { } export interface InternalOpExecutor { - (node: Node, tensorMap: NamedTensorsMap, context: ExecutionContext): Tensor - |Tensor[]; + (node: Node, tensorMap: NamedTensorsMap, context: ExecutionContext, + ops?: typeof tfOps): Tensor | Tensor[]; } export interface InternalOpAsyncExecutor { (node: Node, tensorMap: NamedTensorsMap, context: ExecutionContext, - resourceManager?: ResourceManager): Promise<Tensor[]>; + resourceManager?: ResourceManager, ops?: typeof tfOps): Promise<Tensor[]>; } export declare interface OpMapper { diff --git a/tfjs-converter/yarn.lock b/tfjs-converter/yarn.lock index d903e65e566..0637507584c 100644 --- a/tfjs-converter/yarn.lock +++ b/tfjs-converter/yarn.lock @@ -2077,10 +2077,10 @@ ts-node@~8.8.2: source-map-support "^0.5.6" yn "3.1.1" -typescript@3.5.3: - version "3.5.3" - resolved "https://registry.yarnpkg.com/typescript/-/typescript-3.5.3.tgz#c830f657f93f1ea846819e929092f5fe5983e977" - integrity sha512-ACzBtm/PhXBDId6a6sDJfroT2pOWt/oOnk4/dElG5G33ZL776N3Y6/6bKZJBFpd+b05F3Ct9qDjMeJmRWtE2/g== +typescript@4.4.2: + version "4.4.2" + resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.4.2.tgz#6d618640d430e3569a1dfb44f7d7e600ced3ee86" + integrity
sha512-gzP+t5W4hdy4c+68bfcv0t400HVJMMd2+H9B7gae1nQlBzCqvrXX+6GL/b3GAgyTH966pzrZ70/fRjwAtZksSQ== typescript@~3.9.7: version "3.9.9" diff --git a/tfjs-core/package.json b/tfjs-core/package.json index d13e639f5fe..1b867866e4f 100644 --- a/tfjs-core/package.json +++ b/tfjs-core/package.json @@ -45,7 +45,7 @@ "rollup-plugin-visualizer": "~5.6.0", "shelljs": "~0.8.3", "ts-node": "~8.8.2", - "typescript": "3.5.3", + "typescript": "4.4.2", "yalc": "~1.0.0-pre.21", "yargs": "~17.3.1" }, diff --git a/tfjs-core/scripts/test_snippets/util.ts b/tfjs-core/scripts/test_snippets/util.ts index 34e355d6c87..e4b594b8506 100644 --- a/tfjs-core/scripts/test_snippets/util.ts +++ b/tfjs-core/scripts/test_snippets/util.ts @@ -165,8 +165,13 @@ function getJSDocTag(symbol: ts.Symbol): JSDoc { const tags = symbol.getJsDocTags(); for (let i = 0; i < tags.length; i++) { const jsdocTag = tags[i]; - if (jsdocTag.name === 'doc' && jsdocTag.text != null) { - const json = convertDocStringToDocInfoObject(jsdocTag.text.trim()); + if (jsdocTag.name === 'doc' && jsdocTag.text) { + if (jsdocTag.text.length !== 1) { + throw new Error('Expected exactly one jsdoc SymbolDisplayPart but got' + + ` ${jsdocTag.text.length} instead: ${jsdocTag.text}`); + } + const text = jsdocTag.text[0].text.trim(); + const json = convertDocStringToDocInfoObject(text); return json; } } diff --git a/tfjs-core/yarn.lock b/tfjs-core/yarn.lock index 1e0bc8f6a9f..80074210fc6 100644 --- a/tfjs-core/yarn.lock +++ b/tfjs-core/yarn.lock @@ -2074,10 +2074,10 @@ minimatch@^3.0.4: dependencies: brace-expansion "^1.1.7" -minimist@^1.2.5: - version "1.2.5" - resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.5.tgz#67d66014b66a6a8aaa0c083c5fd58df4e4e97602" - integrity sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw== +minimist@1.2.6, minimist@^1.2.5: + version "1.2.6" + resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.6.tgz#8637a5b759ea0d6e98702cfb3a9283323c93af44" + integrity sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q== mkdirp@^0.5.5: version "0.5.5" @@ -2971,10 +2971,10 @@ type-is@~1.6.18: media-typer "0.3.0" mime-types "~2.1.24" -typescript@3.5.3: - version "3.5.3" - resolved "https://registry.yarnpkg.com/typescript/-/typescript-3.5.3.tgz#c830f657f93f1ea846819e929092f5fe5983e977" - integrity sha512-ACzBtm/PhXBDId6a6sDJfroT2pOWt/oOnk4/dElG5G33ZL776N3Y6/6bKZJBFpd+b05F3Ct9qDjMeJmRWtE2/g== +typescript@4.4.2: + version "4.4.2" + resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.4.2.tgz#6d618640d430e3569a1dfb44f7d7e600ced3ee86" + integrity sha512-gzP+t5W4hdy4c+68bfcv0t400HVJMMd2+H9B7gae1nQlBzCqvrXX+6GL/b3GAgyTH966pzrZ70/fRjwAtZksSQ== ua-parser-js@^0.7.30: version "0.7.31" diff --git a/tfjs-data/package.json b/tfjs-data/package.json index aa39cbfa76f..247a1aee3bf 100644 --- a/tfjs-data/package.json +++ b/tfjs-data/package.json @@ -45,7 +45,7 @@ "rollup-plugin-terser": "~7.0.2", "rollup-plugin-visualizer": "~3.3.2", "ts-node": "~7.0.0", - "typescript": "3.5.3", + "typescript": "4.4.2", "yalc": "^1.0.0-pre.50" }, "scripts": { diff --git a/tfjs-data/src/dataset.ts b/tfjs-data/src/dataset.ts index 65a43b5ccdf..07daffb2d9f 100644 --- a/tfjs-data/src/dataset.ts +++ b/tfjs-data/src/dataset.ts @@ -68,7 +68,7 @@ export abstract class Dataset { * this stream *must* be manually disposed to avoid a GPU memory leak. * The tf.tidy() approach cannot be used in an asynchronous context. 
*/ - abstract async iterator(): Promise<LazyIterator<T>>; + abstract iterator(): Promise<LazyIterator<T>>; readonly size: number = null; diff --git a/tfjs-data/src/datasource.ts b/tfjs-data/src/datasource.ts index 39d36fe3e74..4ec37b18aba 100644 --- a/tfjs-data/src/datasource.ts +++ b/tfjs-data/src/datasource.ts @@ -32,7 +32,7 @@ export abstract class DataSource { * Starts the new stream from the beginning of the data source, even if other * streams have been obtained previously. */ - abstract async iterator(): Promise<ByteChunkIterator>; + abstract iterator(): Promise<ByteChunkIterator>; // TODO(soergel): consider chainable Dataset construction here } diff --git a/tfjs-data/src/iterators/lazy_iterator.ts b/tfjs-data/src/iterators/lazy_iterator.ts index 99978f2b164..bff52944b21 100644 --- a/tfjs-data/src/iterators/lazy_iterator.ts +++ b/tfjs-data/src/iterators/lazy_iterator.ts @@ -160,7 +160,7 @@ export abstract class LazyIterator { * * Calling next() on a closed stream returns `{value:null, done:true}`. */ - abstract async next(): Promise<IteratorResult<T>>; + abstract next(): Promise<IteratorResult<T>>; /** * Collect all remaining elements of a bounded stream into an array. @@ -860,7 +860,7 @@ export abstract class OneToManyIterator extends LazyIterator { * upstream source is exhausted AND nothing was added to the queue * (i.e., any remaining carryover). */ - protected abstract async pump(): Promise<boolean>; + protected abstract pump(): Promise<boolean>; async serialNext(): Promise<IteratorResult<T>> { // Fetch so that the queue contains at least one item if possible. diff --git a/tfjs-data/src/iterators/microphone_iterator_test.ts b/tfjs-data/src/iterators/microphone_iterator_test.ts index 414ca6ba40c..1c986105167 100644 --- a/tfjs-data/src/iterators/microphone_iterator_test.ts +++ b/tfjs-data/src/iterators/microphone_iterator_test.ts @@ -31,8 +31,7 @@ describeBrowserEnvs('MicrophoneIterator', () => { const microphoneIterator = await tfd.microphone(); const result = await microphoneIterator.next(); expect(result.done).toBeFalsy(); - // tslint:disable-next-line:no-any - expect((result.value as any).spectrogram.shape).toEqual([43, 1024, 1]); + expect((result.value).spectrogram.shape).toEqual([43, 1024, 1]); }); it('throws error when sample rate is not available', async done => { @@ -61,8 +60,7 @@ describeBrowserEnvs('MicrophoneIterator', () => { const microphoneIterator = await tfd.microphone({fftSize: 16}); const result = await microphoneIterator.next(); expect(result.done).toBeFalsy(); - // tslint:disable-next-line:no-any - expect((result.value as any).spectrogram.shape).toEqual([43, 16, 1]); + expect((result.value).spectrogram.shape).toEqual([43, 16, 1]); }); it('throws error with invalid fftSize', async done => { @@ -82,8 +80,7 @@ describeBrowserEnvs('MicrophoneIterator', () => { await tfd.microphone({columnTruncateLength: 232, fftSize: 128}); const result = await microphoneIterator.next(); expect(result.done).toBeFalsy(); - // tslint:disable-next-line:no-any - expect((result.value as any).spectrogram.shape).toEqual([43, 232, 1]); + expect((result.value).spectrogram.shape).toEqual([43, 232, 1]); }); it('gets tensor in correct shape with numFramesPerSpectrogram', @@ -92,8 +89,7 @@ describeBrowserEnvs('MicrophoneIterator', () => { await tfd.microphone({numFramesPerSpectrogram: 3, fftSize: 16}); const result = await microphoneIterator.next(); expect(result.done).toBeFalsy(); - // tslint:disable-next-line:no-any - expect((result.value as any).spectrogram.shape).toEqual([3, 16, 1]); + expect((result.value).spectrogram.shape).toEqual([3, 16, 1]); }); it('gets tensor in correct shape with full spectrogram config', @@ -106,8
+102,7 @@ describeBrowserEnvs('MicrophoneIterator', () => { }); const result = await microphoneIterator.next(); expect(result.done).toBeFalsy(); - // tslint:disable-next-line:no-any - expect((result.value as any).spectrogram.shape).toEqual([10, 10, 1]); + expect((result.value).spectrogram.shape).toEqual([10, 10, 1]); }); it('provides both spectrogram and waveform', async () => { @@ -115,26 +110,22 @@ describeBrowserEnvs('MicrophoneIterator', () => { {includeSpectrogram: true, includeWaveform: true, fftSize: 16}); const result = await microphoneIterator.next(); expect(result.done).toBeFalsy(); - // tslint:disable-next-line:no-any - expect((result.value as any).spectrogram.shape).toEqual([43, 16, 1]); - // tslint:disable-next-line:no-any - expect((result.value as any).waveform.shape).toEqual([688, 1]); + expect((result.value).spectrogram.shape).toEqual([43, 16, 1]); + expect((result.value).waveform.shape).toEqual([688, 1]); }); it('stops and restarts microphone', async () => { const microphoneIterator = await tfd.microphone({fftSize: 16}); const result1 = await microphoneIterator.next(); expect(result1.done).toBeFalsy(); - // tslint:disable-next-line:no-any - expect((result1.value as any).spectrogram.shape).toEqual([43, 16, 1]); + expect((result1.value).spectrogram.shape).toEqual([43, 16, 1]); microphoneIterator.stop(); const result2 = await microphoneIterator.next(); expect(result2.done).toBeTruthy(); expect(result2.value).toBeNull(); microphoneIterator.start(); expect(result1.done).toBeFalsy(); - // tslint:disable-next-line:no-any - expect((result1.value as any).spectrogram.shape).toEqual([43, 16, 1]); + expect((result1.value).spectrogram.shape).toEqual([43, 16, 1]); }); it('stops microphone multiple times', async () => { @@ -142,8 +133,7 @@ describeBrowserEnvs('MicrophoneIterator', () => { await tfd.microphone({fftSize: 16, numFramesPerSpectrogram: 2}); const result1 = await microphoneIterator.next(); expect(result1.done).toBeFalsy(); - // tslint:disable-next-line:no-any - expect((result1.value as any).spectrogram.shape).toEqual([2, 16, 1]); + expect((result1.value).spectrogram.shape).toEqual([2, 16, 1]); microphoneIterator.stop(); const result2 = await microphoneIterator.next(); expect(result2.done).toBeTruthy(); @@ -163,8 +153,7 @@ describeBrowserEnvs('MicrophoneIterator', () => { }); const result = await microphoneIterator.next(); expect(result.done).toBeFalsy(); - // tslint:disable-next-line:no-any - const value = result.value as any; + const value = result.value; expect(value.spectrogram.shape).toEqual([1, 16, 1]); test_util.expectArraysClose( await value.spectrogram.array(), @@ -211,8 +200,7 @@ describeBrowserEnvs('MicrophoneIterator', () => { // should have been called 3 times (at 400ms). expect(timesRun).toBe(3); expect(result.done).toBeFalsy(); - // tslint:disable-next-line:no-any - const value = result.value as any; + const value = result.value; expect(value.spectrogram.shape).toEqual([10, 10, 1]); } }; @@ -221,7 +209,7 @@ describeBrowserEnvs('MicrophoneIterator', () => { const interval = setInterval(getTensor, 1); // Wait 3 seconds for the intervals to run. 
- await new Promise(resolve => { + await new Promise<void>(resolve => { setTimeout(() => { resolve(); }, 100); @@ -235,8 +223,7 @@ describeBrowserEnvs('MicrophoneIterator', () => { const microphoneIterator = await tfd.microphone({fftSize: 16, numFramesPerSpectrogram: 1}); const result = await microphoneIterator.capture(); - // tslint:disable-next-line:no-any - expect((result as any).spectrogram.shape).toEqual([1, 16, 1]); + expect(result.spectrogram.shape).toEqual([1, 16, 1]); }); it('gets waveform from iterator.capture', async () => { @@ -247,8 +234,7 @@ describeBrowserEnvs('MicrophoneIterator', () => { numFramesPerSpectrogram: 1 }); const result = await microphoneIterator.capture(); - // tslint:disable-next-line:no-any - expect((result as any).waveform.shape).toEqual([16, 1]); + expect(result.waveform.shape).toEqual([16, 1]); }); it('gets spectrogram and waveform from iterator.capture', async () => { @@ -259,10 +245,8 @@ describeBrowserEnvs('MicrophoneIterator', () => { numFramesPerSpectrogram: 1 }); const result = await microphoneIterator.capture(); - // tslint:disable-next-line:no-any - expect((result as any).spectrogram.shape).toEqual([1, 16, 1]); - // tslint:disable-next-line:no-any - expect((result as any).waveform.shape).toEqual([16, 1]); + expect(result.spectrogram.shape).toEqual([1, 16, 1]); + expect(result.waveform.shape).toEqual([16, 1]); }); } }); diff --git a/tfjs-data/src/iterators/string_iterator.ts b/tfjs-data/src/iterators/string_iterator.ts index 7707950ee0d..611947ca8ba 100644 --- a/tfjs-data/src/iterators/string_iterator.ts +++ b/tfjs-data/src/iterators/string_iterator.ts @@ -96,7 +96,7 @@ class SplitIteratorImpl extends OneToManyIterator { this.carryover = ''; return true; } - const lines = chunkResult.value.split(this.separator); + const lines = chunkResult.value.split(this.separator) as string[]; // Note the behavior: " ab ".split(' ') === ['', 'ab', ''] // Thus the carryover may be '' if the separator falls on a chunk // boundary; this produces the correct result.
diff --git a/tfjs-data/src/util/test_utils.ts b/tfjs-data/src/util/test_utils.ts index 6709df491ed..e57f674c997 100644 --- a/tfjs-data/src/util/test_utils.ts +++ b/tfjs-data/src/util/test_utils.ts @@ -46,7 +46,7 @@ export async function replaceHTMLVideoElementSource( videoElement.play(); if (videoElement.readyState < 2) { - await new Promise(resolve => { + await new Promise<void>(resolve => { videoElement.addEventListener('loadeddata', () => resolve()); }); } diff --git a/tfjs-data/yarn.lock b/tfjs-data/yarn.lock index e4d0fca3362..6f634d8607d 100644 --- a/tfjs-data/yarn.lock +++ b/tfjs-data/yarn.lock @@ -3696,10 +3696,10 @@ typedarray-to-buffer@^3.1.5: dependencies: is-typedarray "^1.0.0" -typescript@3.5.3: - version "3.5.3" - resolved "https://registry.yarnpkg.com/typescript/-/typescript-3.5.3.tgz#c830f657f93f1ea846819e929092f5fe5983e977" - integrity sha512-ACzBtm/PhXBDId6a6sDJfroT2pOWt/oOnk4/dElG5G33ZL776N3Y6/6bKZJBFpd+b05F3Ct9qDjMeJmRWtE2/g== +typescript@4.4.2: + version "4.4.2" + resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.4.2.tgz#6d618640d430e3569a1dfb44f7d7e600ced3ee86" + integrity sha512-gzP+t5W4hdy4c+68bfcv0t400HVJMMd2+H9B7gae1nQlBzCqvrXX+6GL/b3GAgyTH966pzrZ70/fRjwAtZksSQ== ua-parser-js@^0.7.30: version "0.7.31" diff --git a/tfjs-inference/package.json b/tfjs-inference/package.json index 6525bb9019c..e13bf7b0e34 100644 --- a/tfjs-inference/package.json +++ b/tfjs-inference/package.json @@ -16,19 +16,19 @@ }, "license": "Apache-2.0", "devDependencies": { - "@tensorflow/tfjs-core": "2.8.3", - "@tensorflow/tfjs-converter": "2.8.3", "@tensorflow/tfjs-backend-cpu": "2.8.3", "@tensorflow/tfjs-backend-wasm": "2.8.3", + "@tensorflow/tfjs-converter": "2.8.3", + "@tensorflow/tfjs-core": "2.8.3", "@types/jasmine": "~3.0.0", - "@types/rimraf": "~3.0.0", + "@types/rimraf": "~3.0.0", "clang-format": "~1.2.4", "jasmine": "~3.1.0", "jasmine-core": "~3.1.0", "rimraf": "~3.0.2", "ts-node": "~8.8.2", "tslint": "~6.1.3", - "typescript": "3.5.3" + "typescript": "4.4.2" }, "scripts": { "build": "tsc", @@ -42,9 +42,9 @@ "dependencies": { "@types/seedrandom": "2.4.27", "@types/yargs": "~15.0.5", + "pkg": "~4.5.1", "seedrandom": "2.4.3", - "yargs": "~15.3.1", - "pkg": "~4.5.1" + "yargs": "~15.3.1" }, "pkg": { "assets": "node_modules/@tensorflow/tfjs-backend-wasm/dist/tfjs-backend-wasm.wasm" diff --git a/tfjs-inference/yarn.lock b/tfjs-inference/yarn.lock index 800373604e7..7fe7c522be3 100644 --- a/tfjs-inference/yarn.lock +++ b/tfjs-inference/yarn.lock @@ -1614,10 +1614,10 @@ type-check@~0.3.2: dependencies: prelude-ls "~1.1.2" -typescript@3.5.3: - version "3.5.3" - resolved "https://registry.yarnpkg.com/typescript/-/typescript-3.5.3.tgz#c830f657f93f1ea846819e929092f5fe5983e977" - integrity sha512-ACzBtm/PhXBDId6a6sDJfroT2pOWt/oOnk4/dElG5G33ZL776N3Y6/6bKZJBFpd+b05F3Ct9qDjMeJmRWtE2/g== +typescript@4.4.2: + version "4.4.2" + resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.4.2.tgz#6d618640d430e3569a1dfb44f7d7e600ced3ee86" + integrity sha512-gzP+t5W4hdy4c+68bfcv0t400HVJMMd2+H9B7gae1nQlBzCqvrXX+6GL/b3GAgyTH966pzrZ70/fRjwAtZksSQ== uid2@0.0.3: version "0.0.3" diff --git a/tfjs-layers/package.json b/tfjs-layers/package.json index 67227e39945..7051b309506 100644 --- a/tfjs-layers/package.json +++ b/tfjs-layers/package.json @@ -42,7 +42,7 @@ "rollup-plugin-terser": "~7.0.2", "rollup-plugin-visualizer": "~3.3.2", "ts-node": "~8.8.2", - "typescript": "3.5.3", + "typescript": "4.4.2", "yalc": "~1.0.0-pre.50" }, "scripts": { diff --git a/tfjs-layers/src/engine/executor.ts
b/tfjs-layers/src/engine/executor.ts index bc8dd38ee52..be60770a306 100644 --- a/tfjs-layers/src/engine/executor.ts +++ b/tfjs-layers/src/engine/executor.ts @@ -250,7 +250,7 @@ export function execute( const arrayFetches = Array.isArray(fetches); const fetchArray: SymbolicTensor[] = - arrayFetches ? fetches as SymbolicTensor[] : [fetches as SymbolicTensor]; + arrayFetches ? fetches : [fetches]; const outputNames = fetchArray.map(t => t.name); const finalOutputs: Tensor[] = []; diff --git a/tfjs-layers/src/engine/training.ts b/tfjs-layers/src/engine/training.ts index 83c6f08ec1f..eab16c27584 100644 --- a/tfjs-layers/src/engine/training.ts +++ b/tfjs-layers/src/engine/training.ts @@ -941,7 +941,7 @@ export class LayersModel extends Container implements tfc.InferenceModel { const outputsIsArray = Array.isArray(outputs); const outputNames = - (outputsIsArray ? outputs as string[] : [outputs as string]); + (outputsIsArray ? outputs : [outputs]); const outputSymbolicTensors = this.retrieveSymbolicTensors(outputNames); // Format the input into a FeedDict. diff --git a/tfjs-layers/yarn.lock b/tfjs-layers/yarn.lock index 5aadcc54dd1..7fd6ed16a0b 100644 --- a/tfjs-layers/yarn.lock +++ b/tfjs-layers/yarn.lock @@ -897,9 +897,6 @@ resolved "https://registry.yarnpkg.com/@socket.io/base64-arraybuffer/-/base64-arraybuffer-1.0.2.tgz#568d9beae00b0d835f4f8c53fd55714986492e61" integrity sha512-dOlCBKnDw4iShaIsH/bxujKTM18+2TOAsYz+KSc11Am38H4q5Xw8Bbz97ZYdrVNM+um3p7w86Bvvmcn9q+5+eQ== -"@tensorflow/tfjs-backend-cpu@link:../link-package/node_modules/@tensorflow/link-package/node_modules/@tensorflow/tfjs-backend-cpu": - version "0.0.0" - "@tensorflow/tfjs-backend-cpu@link:../link-package/node_modules/@tensorflow/tfjs-backend-cpu": version "0.0.0" uid "" @@ -942,21 +939,11 @@ resolved "https://registry.yarnpkg.com/@types/jasmine/-/jasmine-2.5.54.tgz#a6b5f2ae2afb6e0307774e8c7c608e037d491c63" integrity sha512-B9YofFbUljs19g5gBKUYeLIulsh31U5AK70F41BImQRHEZQGm4GcN922UvnYwkduMqbC/NH+9fruWa/zrqvHIg== -"@types/long@^4.0.1": - version "4.0.1" - resolved "https://registry.yarnpkg.com/@types/long/-/long-4.0.1.tgz#459c65fa1867dafe6a8f322c4c51695663cc55e9" - integrity sha512-5tXH6Bx/kNGd3MgffdmP4dy2Z+G4eaXw0SE81Tq3BNadtnMR5/ySMzX4SLEzHJzSmPNn4HIdpQsBvXMUykr58w== - "@types/node@*", "@types/node@>=10.0.0": version "14.14.36" resolved "https://registry.yarnpkg.com/@types/node/-/node-14.14.36.tgz#5637905dbb15c30a33a3c65b9ef7c20e3c85ebad" integrity sha512-kjivUwDJfIjngzbhooRnOLhGYz6oRFi+L+EpMjxroDYXwDw9lHrJJ43E+dJ6KAd3V3WxWAJ/qZE9XKYHhjPOFQ== -"@types/offscreencanvas@~2019.3.0": - version "2019.3.0" - resolved "https://registry.yarnpkg.com/@types/offscreencanvas/-/offscreencanvas-2019.3.0.tgz#3336428ec7e9180cf4566dfea5da04eb586a6553" - integrity sha512-esIJx9bQg+QYF0ra8GnvfianIY8qWB0GBx54PK5Eps6m+xTj86KLavHv6qDhzKcu5UUOgNfJ2pWaIIV7TRUd9Q== - "@types/resolve@0.0.8": version "0.0.8" resolved "https://registry.yarnpkg.com/@types/resolve/-/resolve-0.0.8.tgz#f26074d238e02659e323ce1a13d041eee280e194" @@ -964,21 +951,6 @@ dependencies: "@types/node" "*" -"@types/seedrandom@2.4.27": - version "2.4.27" - resolved "https://registry.yarnpkg.com/@types/seedrandom/-/seedrandom-2.4.27.tgz#9db563937dd86915f69092bc43259d2f48578e41" - integrity sha1-nbVjk33YaRX2kJK8QyWdL0hXjkE= - -"@types/webgl-ext@0.0.30": - version "0.0.30" - resolved "https://registry.yarnpkg.com/@types/webgl-ext/-/webgl-ext-0.0.30.tgz#0ce498c16a41a23d15289e0b844d945b25f0fb9d" - integrity 
sha512-LKVgNmBxN0BbljJrVUwkxwRYqzsAEPcZOe6S2T6ZaBDIrFp0qu4FNlpc5sM1tGbXUYFgdVQIoeLk1Y1UoblyEg== - -"@types/webgl2@0.0.6": - version "0.0.6" - resolved "https://registry.yarnpkg.com/@types/webgl2/-/webgl2-0.0.6.tgz#1ea2db791362bd8521548d664dbd3c5311cdf4b6" - integrity sha512-50GQhDVTq/herLMiqSQkdtRu+d5q/cWHn4VvKJtrj4DJAjo1MNkWYa2MA41BaBO1q1HgsUjuQvEOk0QHvlnAaQ== - accepts@~1.3.4: version "1.3.7" resolved "https://registry.yarnpkg.com/accepts/-/accepts-1.3.7.tgz#531bc726517a3b2b41f850021c6cc15eaab507cd" @@ -2607,11 +2579,6 @@ log4js@^6.3.0, log4js@^6.4.1: rfdc "^1.3.0" streamroller "^3.0.2" -long@4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/long/-/long-4.0.0.tgz#9a7b71cfb7d361a194ea555241c92f7468d5bf28" - integrity sha512-XsP+KhQif4bjX1kbuSiySJFNAehNxgLb6hPRGJ9QsUr8ajHkuXGdrHmFUTUUXhDwVX2R5bY4JNZEwbUiMhV+MA== - magic-string@^0.25.2, magic-string@^0.25.7: version "0.25.7" resolved "https://registry.yarnpkg.com/magic-string/-/magic-string-0.25.7.tgz#3f497d6fd34c669c6798dcb821f2ef31f5445051" @@ -2739,13 +2706,6 @@ negotiator@0.6.2: resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.2.tgz#feacf7ccf525a77ae9634436a64883ffeca346fb" integrity sha512-hZXc7K2e+PgeI1eDBe/10Ard4ekbfrrqG8Ep+8Jmf4JID2bNg7NvCPOZN+kfF574pFQI7mum2AUqDidoKqcTOw== -node-fetch@~2.6.1: - version "2.6.7" - resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.7.tgz#24de9fba827e3b4ae44dc8b20256a379160052ad" - integrity sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ== - dependencies: - whatwg-url "^5.0.0" - node-releases@^2.0.2: version "2.0.2" resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-2.0.2.tgz#7139fe71e2f4f11b47d4d2986aaf8c48699e0c01" @@ -3223,11 +3183,6 @@ secure-compare@3.0.1: resolved "https://registry.yarnpkg.com/secure-compare/-/secure-compare-3.0.1.tgz#f1a0329b308b221fae37b9974f3d578d0ca999e3" integrity sha1-8aAymzCLIh+uN7mXTz1XjQypmeM= -seedrandom@2.4.3: - version "2.4.3" - resolved "https://registry.yarnpkg.com/seedrandom/-/seedrandom-2.4.3.tgz#2438504dad33917314bff18ac4d794f16d6aaecc" - integrity sha1-JDhQTa0zkXMUv/GKxNeU8W1qrsw= - semver@7.0.0: version "7.0.0" resolved "https://registry.yarnpkg.com/semver/-/semver-7.0.0.tgz#5f3ca35761e47e05b206c6daff2cf814f0316b8e" @@ -3482,11 +3437,6 @@ toidentifier@1.0.0: resolved "https://registry.yarnpkg.com/toidentifier/-/toidentifier-1.0.0.tgz#7e1be3470f1e77948bc43d94a3c8f4d7752ba553" integrity sha512-yaOH/Pk/VEhBWWTlhI+qXxDFXlejDGcQipMlyxda9nthulaxLZUNcUqFxokp0vcYnvteJln5FNQDRrxj3YcbVw== -tr46@~0.0.3: - version "0.0.3" - resolved "https://registry.yarnpkg.com/tr46/-/tr46-0.0.3.tgz#8184fd347dac9cdc185992f3a6622e14b9d9ab6a" - integrity sha1-gYT9NH2snNwYWZLzpmIuFLnZq2o= - ts-node@~8.8.2: version "8.8.2" resolved "https://registry.yarnpkg.com/ts-node/-/ts-node-8.8.2.tgz#0b39e690bee39ea5111513a9d2bcdc0bc121755f" @@ -3511,10 +3461,10 @@ type-is@~1.6.17: media-typer "0.3.0" mime-types "~2.1.24" -typescript@3.5.3: - version "3.5.3" - resolved "https://registry.yarnpkg.com/typescript/-/typescript-3.5.3.tgz#c830f657f93f1ea846819e929092f5fe5983e977" - integrity sha512-ACzBtm/PhXBDId6a6sDJfroT2pOWt/oOnk4/dElG5G33ZL776N3Y6/6bKZJBFpd+b05F3Ct9qDjMeJmRWtE2/g== +typescript@4.4.2: + version "4.4.2" + resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.4.2.tgz#6d618640d430e3569a1dfb44f7d7e600ced3ee86" + integrity sha512-gzP+t5W4hdy4c+68bfcv0t400HVJMMd2+H9B7gae1nQlBzCqvrXX+6GL/b3GAgyTH966pzrZ70/fRjwAtZksSQ== ua-parser-js@^0.7.30: version "0.7.31" @@ -3633,19 
+3583,6 @@ wcwidth@^1.0.1: dependencies: defaults "^1.0.3" -webidl-conversions@^3.0.0: - version "3.0.1" - resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-3.0.1.tgz#24534275e2a7bc6be7bc86611cc16ae0a5654871" - integrity sha1-JFNCdeKnvGvnvIZhHMFq4KVlSHE= - -whatwg-url@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-5.0.0.tgz#966454e8765462e37644d3626f6742ce8b70965d" - integrity sha1-lmRU6HZUYuN2RNNib2dCzotwll0= - dependencies: - tr46 "~0.0.3" - webidl-conversions "^3.0.0" - which-boxed-primitive@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz#13757bc89b209b049fe5d86430e21cf40a89a8e6" diff --git a/tfjs-node-gpu/package.json b/tfjs-node-gpu/package.json index 4150019284c..527366153ee 100644 --- a/tfjs-node-gpu/package.json +++ b/tfjs-node-gpu/package.json @@ -67,7 +67,7 @@ "ts-node": "^5.0.1", "tslint": "~6.1.3", "tslint-no-circular-imports": "^0.7.0", - "typescript": "3.5.3", + "typescript": "4.4.2", "yalc": "~1.0.0-pre.50", "yargs": "^16.2.0" }, diff --git a/tfjs-node-gpu/yarn.lock b/tfjs-node-gpu/yarn.lock index 4355956596c..0be8f3d969a 100644 --- a/tfjs-node-gpu/yarn.lock +++ b/tfjs-node-gpu/yarn.lock @@ -353,7 +353,7 @@ abbrev@1: adm-zip@^0.5.2: version "0.5.9" resolved "https://registry.yarnpkg.com/adm-zip/-/adm-zip-0.5.9.tgz#b33691028333821c0cf95c31374c5462f2905a83" - integrity "sha1-szaRAoMzghwM+VwxN0xUYvKQWoM= sha512-s+3fXLkeeLjZ2kLjCBwQufpI5fuN+kIGBxu6530nVQZGVol0d7Y/M88/xw9HGGUcJjKf8LutN3VPRUBq6N7Ajg==" + integrity sha512-s+3fXLkeeLjZ2kLjCBwQufpI5fuN+kIGBxu6530nVQZGVol0d7Y/M88/xw9HGGUcJjKf8LutN3VPRUBq6N7Ajg== agent-base@6: version "6.0.2" @@ -2074,10 +2074,10 @@ typedarray-to-buffer@^3.1.5: dependencies: is-typedarray "^1.0.0" -typescript@3.5.3: - version "3.5.3" - resolved "https://registry.yarnpkg.com/typescript/-/typescript-3.5.3.tgz#c830f657f93f1ea846819e929092f5fe5983e977" - integrity sha512-ACzBtm/PhXBDId6a6sDJfroT2pOWt/oOnk4/dElG5G33ZL776N3Y6/6bKZJBFpd+b05F3Ct9qDjMeJmRWtE2/g== +typescript@4.4.2: + version "4.4.2" + resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.4.2.tgz#6d618640d430e3569a1dfb44f7d7e600ced3ee86" + integrity sha512-gzP+t5W4hdy4c+68bfcv0t400HVJMMd2+H9B7gae1nQlBzCqvrXX+6GL/b3GAgyTH966pzrZ70/fRjwAtZksSQ== universalify@^0.1.0: version "0.1.2" diff --git a/tfjs-node/package.json b/tfjs-node/package.json index 5a91c557d1f..91fa370fdeb 100644 --- a/tfjs-node/package.json +++ b/tfjs-node/package.json @@ -64,7 +64,7 @@ "ts-node": "^5.0.1", "tslint": "~6.1.3", "tslint-no-circular-imports": "^0.7.0", - "typescript": "3.5.3", + "typescript": "4.4.2", "yalc": "~1.0.0-pre.50", "yargs": "^16.2.0" }, diff --git a/tfjs-node/yarn.lock b/tfjs-node/yarn.lock index a2e61075efa..de4a3ab47e7 100644 --- a/tfjs-node/yarn.lock +++ b/tfjs-node/yarn.lock @@ -1779,10 +1779,10 @@ typedarray-to-buffer@^3.1.5: dependencies: is-typedarray "^1.0.0" -typescript@3.5.3: - version "3.5.3" - resolved "https://registry.yarnpkg.com/typescript/-/typescript-3.5.3.tgz#c830f657f93f1ea846819e929092f5fe5983e977" - integrity sha512-ACzBtm/PhXBDId6a6sDJfroT2pOWt/oOnk4/dElG5G33ZL776N3Y6/6bKZJBFpd+b05F3Ct9qDjMeJmRWtE2/g== +typescript@4.4.2: + version "4.4.2" + resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.4.2.tgz#6d618640d430e3569a1dfb44f7d7e600ced3ee86" + integrity sha512-gzP+t5W4hdy4c+68bfcv0t400HVJMMd2+H9B7gae1nQlBzCqvrXX+6GL/b3GAgyTH966pzrZ70/fRjwAtZksSQ== universalify@^0.1.0: version "0.1.2" diff --git 
a/tfjs-react-native/integration_rn59/package.json b/tfjs-react-native/integration_rn59/package.json index 5c6e91121b1..f59ece9c345 100644 --- a/tfjs-react-native/integration_rn59/package.json +++ b/tfjs-react-native/integration_rn59/package.json @@ -56,7 +56,7 @@ "ts-node": "~8.8.2", "tslint": "~5.11.0", "tslint-no-circular-imports": "^0.5.0", - "typescript": "3.5.3", + "typescript": "4.4.2", "webdriverio": "^5.13.2" }, "jest": { diff --git a/tfjs-react-native/integration_rn59/yarn.lock b/tfjs-react-native/integration_rn59/yarn.lock index dc2aaada3ee..e3c4617ecce 100644 --- a/tfjs-react-native/integration_rn59/yarn.lock +++ b/tfjs-react-native/integration_rn59/yarn.lock @@ -7428,10 +7428,10 @@ typedarray@^0.0.6: resolved "https://registry.yarnpkg.com/typedarray/-/typedarray-0.0.6.tgz#867ac74e3864187b1d3d47d996a78ec5c8830777" integrity sha1-hnrHTjhkGHsdPUfZlqeOxciDB3c= -typescript@3.5.3: - version "3.5.3" - resolved "https://registry.yarnpkg.com/typescript/-/typescript-3.5.3.tgz#c830f657f93f1ea846819e929092f5fe5983e977" - integrity sha512-ACzBtm/PhXBDId6a6sDJfroT2pOWt/oOnk4/dElG5G33ZL776N3Y6/6bKZJBFpd+b05F3Ct9qDjMeJmRWtE2/g== +typescript@4.4.2: + version "4.4.2" + resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.4.2.tgz#6d618640d430e3569a1dfb44f7d7e600ced3ee86" + integrity sha512-gzP+t5W4hdy4c+68bfcv0t400HVJMMd2+H9B7gae1nQlBzCqvrXX+6GL/b3GAgyTH966pzrZ70/fRjwAtZksSQ== ua-parser-js@^0.7.18, ua-parser-js@^0.7.19: version "0.7.31" diff --git a/tfjs-react-native/package.json b/tfjs-react-native/package.json index 50b10496f03..4ee0c34c7ba 100644 --- a/tfjs-react-native/package.json +++ b/tfjs-react-native/package.json @@ -59,7 +59,7 @@ "rollup-plugin-uglify": "~3.0.0", "tslint": "~6.1.3", "tslint-no-circular-imports": "^0.7.0", - "typescript": "3.5.3", + "typescript": "4.4.2", "yalc": "^1.0.0-pre.50" }, "dependencies": { diff --git a/tfjs-react-native/yarn.lock b/tfjs-react-native/yarn.lock index 024b240b897..013063e2e7e 100644 --- a/tfjs-react-native/yarn.lock +++ b/tfjs-react-native/yarn.lock @@ -2460,10 +2460,10 @@ typedarray-to-buffer@^3.1.5: dependencies: is-typedarray "^1.0.0" -typescript@3.5.3: - version "3.5.3" - resolved "https://registry.yarnpkg.com/typescript/-/typescript-3.5.3.tgz#c830f657f93f1ea846819e929092f5fe5983e977" - integrity sha512-ACzBtm/PhXBDId6a6sDJfroT2pOWt/oOnk4/dElG5G33ZL776N3Y6/6bKZJBFpd+b05F3Ct9qDjMeJmRWtE2/g== +typescript@4.4.2: + version "4.4.2" + resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.4.2.tgz#6d618640d430e3569a1dfb44f7d7e600ced3ee86" + integrity sha512-gzP+t5W4hdy4c+68bfcv0t400HVJMMd2+H9B7gae1nQlBzCqvrXX+6GL/b3GAgyTH966pzrZ70/fRjwAtZksSQ== uglify-es@^3.3.7: version "3.3.9" diff --git a/tfjs-tflite/demo/package.json b/tfjs-tflite/demo/package.json index 9b98bff7475..7cdcb7c0a60 100644 --- a/tfjs-tflite/demo/package.json +++ b/tfjs-tflite/demo/package.json @@ -12,7 +12,7 @@ "clang-format": "~1.5.0", "cross-env": "^7.0.3", "parcel": "^2.3.2", - "typescript": "3.5.3" + "typescript": "4.4.2" }, "scripts": { "build-deps": "cd ../../link-package && yarn build", diff --git a/tfjs-tflite/demo/src/script.ts b/tfjs-tflite/demo/src/script.ts index 32ddcfdb79b..4cea7a88644 100644 --- a/tfjs-tflite/demo/src/script.ts +++ b/tfjs-tflite/demo/src/script.ts @@ -107,8 +107,7 @@ function handleClickTrigger( canvas.classList.add('show'); // Show latency stat. - const stats = trigger.closest('.img-container')!.querySelector('.stats')! 
as - HTMLCanvasElement; + const stats = trigger.closest('.img-container')!.querySelector('.stats')!; stats.classList.add('show'); stats.innerHTML = latency.toFixed(1) + ' ms'; } diff --git a/tfjs-tflite/demo/yarn.lock b/tfjs-tflite/demo/yarn.lock index 37bc9ec8259..ca4480bcf1b 100644 --- a/tfjs-tflite/demo/yarn.lock +++ b/tfjs-tflite/demo/yarn.lock @@ -1744,10 +1744,10 @@ type-fest@^0.20.2: resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.20.2.tgz#1bf207f4b28f91583666cb5fbd327887301cd5f4" integrity sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ== -typescript@3.5.3: - version "3.5.3" - resolved "https://registry.yarnpkg.com/typescript/-/typescript-3.5.3.tgz#c830f657f93f1ea846819e929092f5fe5983e977" - integrity sha512-ACzBtm/PhXBDId6a6sDJfroT2pOWt/oOnk4/dElG5G33ZL776N3Y6/6bKZJBFpd+b05F3Ct9qDjMeJmRWtE2/g== +typescript@4.4.2: + version "4.4.2" + resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.4.2.tgz#6d618640d430e3569a1dfb44f7d7e600ced3ee86" + integrity sha512-gzP+t5W4hdy4c+68bfcv0t400HVJMMd2+H9B7gae1nQlBzCqvrXX+6GL/b3GAgyTH966pzrZ70/fRjwAtZksSQ== util-deprecate@^1.0.2: version "1.0.2" diff --git a/tfjs-tflite/package.json b/tfjs-tflite/package.json index f6b410845d3..01b7f8fbf8b 100644 --- a/tfjs-tflite/package.json +++ b/tfjs-tflite/package.json @@ -47,7 +47,7 @@ "tmp": "^0.2.1", "ts-node": "^5.0.1", "tslib": "^2.1.0", - "typescript": "3.5.3", + "typescript": "4.4.2", "yalc": "~1.0.0-pre.50" }, "scripts": { diff --git a/tfjs-tflite/yarn.lock b/tfjs-tflite/yarn.lock index a7a608a6902..f2c9a70e891 100644 --- a/tfjs-tflite/yarn.lock +++ b/tfjs-tflite/yarn.lock @@ -3316,10 +3316,10 @@ type-is@~1.6.17: media-typer "0.3.0" mime-types "~2.1.24" -typescript@3.5.3: - version "3.5.3" - resolved "https://registry.yarnpkg.com/typescript/-/typescript-3.5.3.tgz#c830f657f93f1ea846819e929092f5fe5983e977" - integrity sha512-ACzBtm/PhXBDId6a6sDJfroT2pOWt/oOnk4/dElG5G33ZL776N3Y6/6bKZJBFpd+b05F3Ct9qDjMeJmRWtE2/g== +typescript@4.4.2: + version "4.4.2" + resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.4.2.tgz#6d618640d430e3569a1dfb44f7d7e600ced3ee86" + integrity sha512-gzP+t5W4hdy4c+68bfcv0t400HVJMMd2+H9B7gae1nQlBzCqvrXX+6GL/b3GAgyTH966pzrZ70/fRjwAtZksSQ== ua-parser-js@^0.7.30: version "0.7.31" diff --git a/tfjs-vis/src/render/heatmap.ts b/tfjs-vis/src/render/heatmap.ts index f7d963f2aba..781e4a8ca12 100644 --- a/tfjs-vis/src/render/heatmap.ts +++ b/tfjs-vis/src/render/heatmap.ts @@ -170,13 +170,13 @@ export async function heatmap( 'field': 'x', 'type': options.xType, 'title': options.xLabel, - 'sort': false, + 'sort': null, }, 'y': { 'field': 'y', 'type': options.yType, 'title': options.yLabel, - 'sort': false, + 'sort': null, }, 'fill': { 'field': 'value', diff --git a/tfjs/package.json b/tfjs/package.json index f1e48b75341..2e4d6d78858 100644 --- a/tfjs/package.json +++ b/tfjs/package.json @@ -50,7 +50,7 @@ "ts-node": "~8.8.2", "tslint": "~5.11.0", "tslint-no-circular-imports": "~0.5.0", - "typescript": "3.5.3", + "typescript": "4.4.2", "yalc": "1.0.0-pre.50" }, "scripts": { diff --git a/tfjs/yarn.lock b/tfjs/yarn.lock index bad20f773ac..2516a33075c 100644 --- a/tfjs/yarn.lock +++ b/tfjs/yarn.lock @@ -3790,10 +3790,10 @@ type-is@~1.6.17: media-typer "0.3.0" mime-types "~2.1.24" -typescript@3.5.3: - version "3.5.3" - resolved "https://registry.yarnpkg.com/typescript/-/typescript-3.5.3.tgz#c830f657f93f1ea846819e929092f5fe5983e977" - integrity 
sha512-ACzBtm/PhXBDId6a6sDJfroT2pOWt/oOnk4/dElG5G33ZL776N3Y6/6bKZJBFpd+b05F3Ct9qDjMeJmRWtE2/g== +typescript@4.4.2: + version "4.4.2" + resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.4.2.tgz#6d618640d430e3569a1dfb44f7d7e600ced3ee86" + integrity sha512-gzP+t5W4hdy4c+68bfcv0t400HVJMMd2+H9B7gae1nQlBzCqvrXX+6GL/b3GAgyTH966pzrZ70/fRjwAtZksSQ== ua-parser-js@^0.7.30: version "0.7.31" diff --git a/tsconfig.json b/tsconfig.json index 2f07a01044c..f1c2762318e 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -11,7 +11,7 @@ "declaration": true, "target": "es2017", "lib": [ - "es2017", + "es2019", "dom" ], "outDir": "./dist", diff --git a/tsconfig.test.json b/tsconfig.test.json index 8e3496aced3..a5f42137958 100644 --- a/tsconfig.test.json +++ b/tsconfig.test.json @@ -10,7 +10,7 @@ "declaration": false, "target": "es5", "lib": [ - "es2017", + "es2019", "dom" ], "outDir": "./dist", diff --git a/tsconfig_ts_library.json b/tsconfig_ts_library.json index 23b618ca766..91195787846 100644 --- a/tsconfig_ts_library.json +++ b/tsconfig_ts_library.json @@ -12,7 +12,7 @@ "declaration": true, "target": "es2017", "lib": [ - "es2017", + "es2019", "dom" ], "noUnusedLocals": true, diff --git a/yarn.lock b/yarn.lock index a555b6ea727..710140c4791 100644 --- a/yarn.lock +++ b/yarn.lock @@ -2,24 +2,17 @@ # yarn lockfile v1 -"@babel/code-frame@^7.0.0": +"@babel/code-frame@^7.0.0", "@babel/code-frame@^7.10.4": version "7.15.8" resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.15.8.tgz#45990c47adadb00c03677baa89221f7cc23d2503" integrity sha512-2IAnmn8zbvC/jKYhq5Ki9I+DwjlrtMPUCH/CpHvqI4dNnlwHwsxoIhlc8WcYY5LSYknXQtAlFYuHfqAFCvQ4Wg== dependencies: "@babel/highlight" "^7.14.5" -"@babel/code-frame@^7.10.4": - version "7.14.5" - resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.14.5.tgz#23b08d740e83f49c5e59945fbf1b43e80bbf4edb" - integrity sha512-9pzDqyc6OLDaqe+zbACgFkb6fKMNG6CObKpnYXChRsvYGyEdc7CA2BaqeOM+vOtCS5ndmJicPJhKAwYRI6UfFw== - dependencies: - "@babel/highlight" "^7.14.5" - "@babel/helper-validator-identifier@^7.14.5": - version "7.14.9" - resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.14.9.tgz#6654d171b2024f6d8ee151bf2509699919131d48" - integrity sha512-pQYxPY0UP6IHISRitNe8bsijHex4TWZXi2HwKVsjPiltzlhse2znVcm9Ace510VT1kxIHjGJCZZQBX2gJDbo0g== + version "7.15.7" + resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.15.7.tgz#220df993bfe904a4a6b02ab4f3385a5ebf6e2389" + integrity sha512-K4JvCtQqad9OY2+yTU8w+E82ywk/fe+ELNlt1G8z3bVGlZfn/hOcQQsUhGhW/N+tb3fxK800wLtKOE/aM0m72w== "@babel/highlight@^7.14.5": version "7.14.5" @@ -318,9 +311,9 @@ "@types/node" "*" "@types/node@*", "@types/node@>=10.0.0": - version "16.6.0" - resolved "https://registry.yarnpkg.com/@types/node/-/node-16.6.0.tgz#0d5685f85066f94e97f19e8a67fe003c5fadacc4" - integrity sha512-OyiZPohMMjZEYqcVo/UJ04GyAxXOJEZO/FpzyXxcH4r/ArrVoXHf4MbUrkLp0Tz7/p1mMKpo5zJ6ZHl8XBNthQ== + version "16.10.3" + resolved "https://registry.yarnpkg.com/@types/node/-/node-16.10.3.tgz#7a8f2838603ea314d1d22bb3171d899e15c57bd5" + integrity sha512-ho3Ruq+fFnBrZhUYI46n/bV2GjwzSkwuT4dTf0GkuNFmnb8nq4ny2z9JEVemFi6bdEJanHLlYfy9c6FN9B9McQ== "@types/node@^10.1.0": version "10.17.60" @@ -328,9 +321,9 @@ integrity sha512-F0KIgDJfy2nA3zMLmWGKxcH2ZVEtCZXHHdOQs2gSaQ27+lNeEfGxzkIw90aXswATX7AZ33tahPbzy6KAfUreVw== "@types/node@^12.7.5": - version "12.20.19" - resolved 
"https://registry.yarnpkg.com/@types/node/-/node-12.20.19.tgz#538e61fc220f77ae4a4663c3d8c3cb391365c209" - integrity sha512-niAuZrwrjKck4+XhoCw6AAVQBENHftpXw9F4ryk66fTgYaKQ53R4FI7c9vUGGw5vQis1HKBHDR1gcYI/Bq1xvw== + version "12.20.28" + resolved "https://registry.yarnpkg.com/@types/node/-/node-12.20.28.tgz#4b20048c6052b5f51a8d5e0d2acbf63d5a17e1e2" + integrity sha512-cBw8gzxUPYX+/5lugXIPksioBSbE42k0fZ39p+4yRzfYjN6++eq9kAPdlY9qm+MXyfbk9EmvCYAYRn380sF46w== "@types/offscreencanvas@~2019.3.0": version "2019.3.0" @@ -407,7 +400,7 @@ ajv@~6.12.3: json-schema-traverse "^0.4.1" uri-js "^4.2.2" -ansi-regex@^5.0.0: +ansi-regex@^5.0.1: version "5.0.1" resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-5.0.1.tgz#082cb2c89c9fe8659a311a53bd6a4dc5301db304" integrity sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ== @@ -568,7 +561,7 @@ c8@~7.5.0: yargs "^16.0.0" yargs-parser "^20.0.0" -call-bind@^1.0.0, call-bind@^1.0.2: +call-bind@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/call-bind/-/call-bind-1.0.2.tgz#b1d4e89e688119c3c9a903ad30abb2f6a919be3c" integrity sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA== @@ -733,9 +726,9 @@ copyfiles@~1.2.0: through2 "^2.0.1" core-js@3: - version "3.16.1" - resolved "https://registry.yarnpkg.com/core-js/-/core-js-3.16.1.tgz#f4485ce5c9f3c6a7cb18fa80488e08d362097249" - integrity sha512-AAkP8i35EbefU+JddyWi12AWE9f2N/qr/pwnDtWz4nyUIBGMJPX99ANFFRSw6FefM374lDujdtLDyhN2A/btHw== + version "3.18.2" + resolved "https://registry.yarnpkg.com/core-js/-/core-js-3.18.2.tgz#63a551e8a29f305cd4123754846e65896619ba5b" + integrity sha512-zNhPOUoSgoizoSQFdX1MeZO16ORRb9FFQLts8gSYbZU5FcgXhp24iMWMxnOQo5uIaIG7/6FA/IqJPwev1o9ZXQ== core-util-is@~1.0.0: version "1.0.3" @@ -1027,9 +1020,9 @@ fast-json-stable-stringify@^2.0.0: integrity sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw== fastq@^1.6.0: - version "1.11.1" - resolved "https://registry.yarnpkg.com/fastq/-/fastq-1.11.1.tgz#5d8175aae17db61947f8b162cfc7f63264d22807" - integrity sha512-HOnr8Mc60eNYl1gzwp6r5RoUyAn5/glBolUzP/Ez6IFVPMPirxn/9phgL6zhOtaTy7ISwPvQ+wT+hfcRZh/bzw== + version "1.13.0" + resolved "https://registry.yarnpkg.com/fastq/-/fastq-1.13.0.tgz#616760f88a7526bdfc596b7cab8c18938c36b98c" + integrity sha512-YpkpUnK8od0o1hmeSc7UUs/eB/vIPWJYjKck2QKIzAf71Vm1AAQ3EbuZB3g2JIy+pg+ERD0vqI79KyZiB2e2Nw== dependencies: reusify "^1.0.4" @@ -1075,9 +1068,9 @@ flatted@^3.2.4: integrity sha512-8/sOawo8tJ4QOBX8YlQBMxL8+RLZfxMQOif9o0KUKTNTjMYElWPE0r/m5VNFxTRd0NSw8qSy8dajrwX4RYI1Hw== follow-redirects@^1.0.0: - version "1.14.8" - resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.14.8.tgz#016996fb9a11a100566398b1c6839337d7bfa8fc" - integrity sha512-1x0S9UVJHsQprFcEC/qnNzBLcIxsjAV905f/UkQxbclCsoTWlacCNOpQa/anodLl2uaEKFhfWOvM2Qg77+15zA== + version "1.14.4" + resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.14.4.tgz#838fdf48a8bbdd79e52ee51fb1c94e3ed98b9379" + integrity sha512-zwGkiSXC1MUJG/qmeIFH2HBJx9u0V46QGUe3YR1fXG8bXQxq7fLj0RjLZQ5nubr9qNJUZrH+xUcwXEoXNpfS+g== foreground-child@^2.0.0: version "2.0.0" @@ -1153,9 +1146,9 @@ glob-parent@^5.1.2, glob-parent@~5.1.2: is-glob "^4.0.1" glob@^7.0.0, glob@^7.0.5, glob@^7.0.6, glob@^7.1.1, glob@^7.1.3, glob@^7.1.4, glob@^7.1.6, glob@^7.1.7: - version "7.1.7" - resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.7.tgz#3b193e9233f01d42d0b3f78294bbeeb418f94a90" - integrity 
sha512-OvD9ENzPLbegENnYP5UUfJIirTg4+XwMWGaQfQTY0JenxNvvIKP3U3/tAQSPIu/lHxXYSZmpXlUHeqAIdKzBLQ== + version "7.2.0" + resolved "https://registry.yarnpkg.com/glob/-/glob-7.2.0.tgz#d15535af7732e02e948f4c41628bd910293f6023" + integrity sha512-lmLf6gtyrPq8tTjSmrO94wBeQbFR3HbLHbuyD69wuyQkImp2hWqMGB47OX65FBkPffO641IP9jWa1z4ivqG26Q== dependencies: fs.realpath "^1.0.0" inflight "^1.0.4" @@ -1165,9 +1158,9 @@ glob@^7.0.0, glob@^7.0.5, glob@^7.0.6, glob@^7.1.1, glob@^7.1.3, glob@^7.1.4, gl path-is-absolute "^1.0.0" google-protobuf@^3.6.1: - version "3.18.0" - resolved "https://registry.yarnpkg.com/google-protobuf/-/google-protobuf-3.18.0.tgz#687449d8e858305d658dc1145852c306d8222f5a" - integrity sha512-WlaQWRkUOo/lm9uTgNH6nk9IQt814RggWPzKBfnAVewOFzSzRUSmS1yUWRT6ixW1vS7er5p6tmLSmwzpPpmc8A== + version "3.18.1" + resolved "https://registry.yarnpkg.com/google-protobuf/-/google-protobuf-3.18.1.tgz#31de10b65e833aa5bbd44680e8a748fa54c920f6" + integrity sha512-cDqSamZ8rGs+pOzhIsBte7wpezUKg/sggeptDWN5odhnRY/eDLa5VWLeNeQvcfiqjS3yUwgM+6OePCJMB7aWZA== graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.2.0, graceful-fs@^4.2.6: version "4.2.9" @@ -1283,11 +1276,12 @@ interpret@^1.0.0: integrity sha512-agE4QfB2Lkp9uICn7BAqoscw4SZP9kTE2hxiFI3jBPmXJfdqiahTbUuKGsMoN2GtqL9AxhYioAcVvgsb1HvRbA== is-arguments@^1.0.4: - version "1.1.0" - resolved "https://registry.yarnpkg.com/is-arguments/-/is-arguments-1.1.0.tgz#62353031dfbee07ceb34656a6bde59efecae8dd9" - integrity sha512-1Ij4lOMPl/xB5kBDn7I+b2ttPMKa8szhEIrXDuXQD/oe3HJLTLhqhgGspwgyGd6MOywBUqVvYicF72lkgDnIHg== + version "1.1.1" + resolved "https://registry.yarnpkg.com/is-arguments/-/is-arguments-1.1.1.tgz#15b3f88fda01f2a97fec84ca761a560f123efa9b" + integrity sha512-8Q7EARjzEnKpt/PCD7e1cgUS0a6X8u5tdSiMqXhojOdoV9TsMsiO+9VLC5vAmO8N7/GmXn7yjR8qnA6bVAEzfA== dependencies: - call-bind "^1.0.0" + call-bind "^1.0.2" + has-tostringtag "^1.0.0" is-binary-path@~2.1.0: version "2.1.0" @@ -1297,9 +1291,9 @@ is-binary-path@~2.1.0: binary-extensions "^2.0.0" is-core-module@^2.2.0: - version "2.5.0" - resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.5.0.tgz#f754843617c70bfd29b7bd87327400cda5c18491" - integrity sha512-TXCMSDsEHMEEZ6eCA8rwRDbLu55MRGmrctljsBX/2v1d9/GzqHOxW5c5oPSgrUt2vBFXebu9rGqckXGPWOlYpg== + version "2.7.0" + resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.7.0.tgz#3c0ef7d31b4acfc574f80c58409d568a836848e3" + integrity sha512-ByY+tjCciCr+9nLryBYcSD50EOGWt95c7tIsKTG1J2ixKKXPvF7Ej3AVd+UfDydAJom3biBGDBALaO79ktwgEQ== dependencies: has "^1.0.3" @@ -1326,9 +1320,9 @@ is-fullwidth-code-point@^3.0.0: integrity sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg== is-glob@^4.0.1, is-glob@~4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.1.tgz#7567dbe9f2f5e2467bc77ab83c4a29482407a5dc" - integrity sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg== + version "4.0.3" + resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.3.tgz#64f61e42cbbb2eec2071a9dac0b28ba1e65d5084" + integrity sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg== dependencies: is-extglob "^2.1.1" @@ -1350,12 +1344,12 @@ is-reference@^1.2.1: "@types/estree" "*" is-regex@^1.0.4: - version "1.1.3" - resolved "https://registry.yarnpkg.com/is-regex/-/is-regex-1.1.3.tgz#d029f9aff6448b93ebbe3f33dac71511fdcbef9f" - integrity 
sha512-qSVXFz28HM7y+IWX6vLCsexdlvzT1PJNFSBuaQLQ5o0IEw8UDYW6/2+eCMVyIsbM8CNLX2a/QWmSpyxYEHY7CQ== + version "1.1.4" + resolved "https://registry.yarnpkg.com/is-regex/-/is-regex-1.1.4.tgz#eef5663cd59fa4c0ae339505323df6854bb15958" + integrity sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg== dependencies: call-bind "^1.0.2" - has-symbols "^1.0.2" + has-tostringtag "^1.0.0" is-running@^2.1.0: version "2.1.0" @@ -1419,9 +1413,9 @@ istanbul-lib-report@^3.0.0: supports-color "^7.1.0" istanbul-reports@^3.0.2: - version "3.0.2" - resolved "https://registry.yarnpkg.com/istanbul-reports/-/istanbul-reports-3.0.2.tgz#d593210e5000683750cb09fc0644e4b6e27fd53b" - integrity sha512-9tZvz7AiR3PEDNGiV9vIouQ/EAcqMXFmkcA1CDFTwOB98OZVDL0PH9glHotf5Ugp6GCOTypfzGWI/OqjWNCRUw== + version "3.0.3" + resolved "https://registry.yarnpkg.com/istanbul-reports/-/istanbul-reports-3.0.3.tgz#974d682037f6d12b15dc55f9a2a5f8f1ea923831" + integrity sha512-0i77ZFLsb9U3DHi22WzmIngVzfoyxxbQcZRqlF3KoKmCJGq9nhFHoGi8FqBztN2rE8w6hURnZghetn0xpkVb6A== dependencies: html-escaper "^2.0.0" istanbul-lib-report "^3.0.0" @@ -1646,17 +1640,17 @@ micromatch@^4.0.4: braces "^3.0.1" picomatch "^2.2.3" -mime-db@1.51.0: - version "1.51.0" - resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.51.0.tgz#d9ff62451859b18342d960850dc3cfb77e63fb0c" - integrity sha512-5y8A56jg7XVQx2mbv1lu49NR4dokRnhZYTtL+KGfaa27uq4pSTXkwQkFJl4pkRMyNFz/EtYDSkiiEHx3F7UN6g== +mime-db@1.50.0: + version "1.50.0" + resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.50.0.tgz#abd4ac94e98d3c0e185016c67ab45d5fde40c11f" + integrity sha512-9tMZCDlYHqeERXEHO9f/hKfNXhre5dK2eE/krIvUjZbS2KPcqGDfNShIWS1uW9XOTKQKqK6qbeOci18rbfW77A== mime-types@~2.1.24: - version "2.1.34" - resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.34.tgz#5a712f9ec1503511a945803640fafe09d3793c24" - integrity sha512-6cP692WwGIs9XXdOO4++N+7qjqv0rqxxVvJ3VHPh/Sc9mVZcQP+ZGhkKiTvWMQRr2tbHkJP/Yn7Y0npb3ZBs4A== + version "2.1.33" + resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.33.tgz#1fa12a904472fafd068e48d9e8401f74d3f70edb" + integrity sha512-plLElXp7pRDd0bNZHw+nMd52vRYjLwQjygaNg7ddJ2uJtTlmnTCjWuPKxVu6//AdaRuME84SvLW91sIkBqGT0g== dependencies: - mime-db "1.51.0" + mime-db "1.50.0" mime@^2.5.2: version "2.5.2" @@ -1717,7 +1711,14 @@ nice-try@^1.0.4: resolved "https://registry.yarnpkg.com/nice-try/-/nice-try-1.0.5.tgz#a3378a7696ce7d223e88fc9b764bd7ef1089e366" integrity sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ== -node-fetch@^2.1.1, node-fetch@^2.6.7: +node-fetch@^2.1.1: + version "2.6.5" + resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.5.tgz#42735537d7f080a7e5f78b6c549b7146be1742fd" + integrity sha512-mmlIVHJEu5rnIxgEgez6b9GgWXbkZj5YZ7fx+2r94a2E+Uirsp6HsPTPlomfdHtpt/B0cdKviwkoaM6pyvUOpQ== + dependencies: + whatwg-url "^5.0.0" + +node-fetch@^2.6.7: version "2.6.7" resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.7.tgz#24de9fba827e3b4ae44dc8b20256a379160052ad" integrity sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ== @@ -1878,17 +1879,7 @@ pause-stream@0.0.11: dependencies: through "~2.3" -picomatch@^2.0.4, picomatch@^2.2.1: - version "2.2.2" - resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.2.2.tgz#21f333e9b6b8eaff02468f5146ea406d345f4dad" - integrity sha512-q0M/9eZHzmr0AulXyPwNfZjtwZ/RBZlbN3K3CErVrk50T2ASYI7Bye0EvekFY3IP1Nt2DHu0re+V2ZHIpMkuWg== - -picomatch@^2.2.2: - version "2.2.3" 
- resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.2.3.tgz#465547f359ccc206d3c48e46a1bcb89bf7ee619d" - integrity sha512-KpELjfwcCDUb9PeigTs2mBJzXUPzAuP2oPcA989He8Rte0+YUAjw1JVedDhuTKPkHjSYzMN3npC9luThGYEKdg== - -picomatch@^2.2.3: +picomatch@^2.0.4, picomatch@^2.2.1, picomatch@^2.2.2, picomatch@^2.2.3: version "2.3.0" resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.3.0.tgz#f1f061de8f6a4bf022892e2d128234fb98302972" integrity sha512-lY1Q/PiJGC2zOv/z391WOTD+Z02bCgsFfvxoXXf6h7kv9o+WmsmzYqrAwY63sNgOxE4xEdq0WyUnXfKeBrSvYw== @@ -2140,9 +2131,9 @@ rollup-plugin-visualizer@~3.3.2: yargs "^15.0.0" rollup@^2.46.0: - version "2.56.2" - resolved "https://registry.yarnpkg.com/rollup/-/rollup-2.56.2.tgz#a045ff3f6af53ee009b5f5016ca3da0329e5470f" - integrity sha512-s8H00ZsRi29M2/lGdm1u8DJpJ9ML8SUOpVVBd33XNeEeL3NVaTiUcSBHzBdF3eAyR0l7VSpsuoVUGrRHq7aPwQ== + version "2.58.0" + resolved "https://registry.yarnpkg.com/rollup/-/rollup-2.58.0.tgz#a643983365e7bf7f5b7c62a8331b983b7c4c67fb" + integrity sha512-NOXpusKnaRpbS7ZVSzcEXqxcLDOagN6iFS8p45RkoiMqPHDLwJm758UF05KlMoCRbLBTZsPOIa887gZJ1AiXvw== optionalDependencies: fsevents "~2.3.2" @@ -2239,9 +2230,9 @@ shelljs@~0.8.5: rechoir "^0.6.2" signal-exit@^3.0.0, signal-exit@^3.0.2: - version "3.0.3" - resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.3.tgz#a1410c2edd8f077b08b4e253c8eacfcaf057461c" - integrity sha512-VUJ49FC8U1OxwZLxIbTTrDvLnf/6TDgxZcK8wxR8zs13xpx7xbG60ndBlhNrFi2EMuFRoeDoJO7wthSLq42EjA== + version "3.0.5" + resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.5.tgz#9e3e8cc0c75a99472b44321033a7702e7738252f" + integrity sha512-KWcOiKeQj6ZyXx7zq4YxSMgHRlod4czeBQZrPb8OKcohcqAXShm7E20kEMle9WBt26hFcAf0qLOcp5zmY7kOqQ== simple-wcswidth@^1.0.1: version "1.0.1" @@ -2290,10 +2281,10 @@ source-map-support@0.5.9: buffer-from "^1.0.0" source-map "^0.6.0" -source-map-support@^0.5.6, source-map-support@~0.5.19: - version "0.5.19" - resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.19.tgz#a98b62f86dcaf4f67399648c085291ab9e8fed61" - integrity sha512-Wonm7zOCIJzBGQdB+thsPar0kYuCIzYvxZwlBa87yi/Mdjv7Tip2cyVbLj5o0cFPN4EVkuTwb3GDDyUx2DGnGw== +source-map-support@^0.5.6, source-map-support@~0.5.20: + version "0.5.20" + resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.20.tgz#12166089f8f5e5e8c56926b377633392dd2cb6c9" + integrity sha512-n1lZZ8Ve4ksRqizaBQgxXDgKwttHDhyfQjA6YZZn8+AroHbsIz+JjwxQDxbp+7y5OYCI8t1Yk7etjD9CRd2hIw== dependencies: buffer-from "^1.0.0" source-map "^0.6.0" @@ -2347,13 +2338,13 @@ streamroller@^3.0.2: fs-extra "^10.0.0" string-width@^4.1.0, string-width@^4.2.0: - version "4.2.2" - resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.2.tgz#dafd4f9559a7585cfba529c6a0a4f73488ebd4c5" - integrity sha512-XBJbT3N4JhVumXE0eoLU9DCjcaF92KLNqTmFCnG1pf8duUxFGwtP6AD6nkjw9a3IdiRtL3E2w3JDiE/xi3vOeA== + version "4.2.3" + resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" + integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== dependencies: emoji-regex "^8.0.0" is-fullwidth-code-point "^3.0.0" - strip-ansi "^6.0.0" + strip-ansi "^6.0.1" string_decoder@~0.10.x: version "0.10.31" @@ -2367,12 +2358,12 @@ string_decoder@~1.1.1: dependencies: safe-buffer "~5.1.0" -strip-ansi@^6.0.0: - version "6.0.0" - resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.0.tgz#0b1571dd7669ccd4f3e06e14ef1eed26225ae532" - 
integrity sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w== +strip-ansi@^6.0.0, strip-ansi@^6.0.1: + version "6.0.1" + resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" + integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== dependencies: - ansi-regex "^5.0.0" + ansi-regex "^5.0.1" strip-eof@^1.0.0: version "1.0.0" @@ -2401,13 +2392,13 @@ temp-fs@^0.9.9: rimraf "~2.5.2" terser@^5.0.0, terser@^5.7.0: - version "5.7.1" - resolved "https://registry.yarnpkg.com/terser/-/terser-5.7.1.tgz#2dc7a61009b66bb638305cb2a824763b116bf784" - integrity sha512-b3e+d5JbHAe/JSjwsC3Zn55wsBIM7AsHLjKxT31kGCldgbpFePaFo+PiddtO6uwRZWRw7sPXmAN8dTW61xmnSg== + version "5.9.0" + resolved "https://registry.yarnpkg.com/terser/-/terser-5.9.0.tgz#47d6e629a522963240f2b55fcaa3c99083d2c351" + integrity sha512-h5hxa23sCdpzcye/7b8YqbE5OwKca/ni0RQz1uRX3tGh8haaGHqcuSqbGRybuAKNdntZ0mDgFNXPJ48xQ2RXKQ== dependencies: commander "^2.20.0" source-map "~0.7.2" - source-map-support "~0.5.19" + source-map-support "~0.5.20" test-exclude@^6.0.0: version "6.0.0" @@ -2479,12 +2470,17 @@ tslib@^1.13.0, tslib@^1.8.1: resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.14.1.tgz#cf2d38bdc34a134bcaf1091c41f6619e2f672d00" integrity sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg== +tslib@^2.3.1: + version "2.3.1" + resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.3.1.tgz#e8a335add5ceae51aa261d32a490158ef042ef01" + integrity sha512-77EbyPPpMz+FRFRuAFlWMtmgUWGe9UOG2Z25NqCwiIjRhOf5iKGuzSe5P2w1laq+FkRy4p+PCuVkJSGkzTEKVw== + tslint-no-circular-imports@~0.7.0: version "0.7.0" resolved "https://registry.yarnpkg.com/tslint-no-circular-imports/-/tslint-no-circular-imports-0.7.0.tgz#9df0a15654d66b172e0b7843eed073fa5ae99b5f" integrity sha512-k3wxpeMC4ef40UbpfBVHEHIzKfNZq5/SCtAO1YjGsaNTklo+K53/TWLrym+poA65RJFDiYgYNWvkeIIkJNA0Vw== -tslint@^6.1.3: +tslint@~6.1.3: version "6.1.3" resolved "https://registry.yarnpkg.com/tslint/-/tslint-6.1.3.tgz#5c23b2eccc32487d5523bd3a470e9aa31789d904" integrity sha512-IbR4nkT96EQOvKE2PW/djGz8iGNeJ4rF2mBfiYaR/nvUWYKJhLwimoJKgjIFEIDibBtOevj7BqCRL4oHeWWUCg== @@ -2525,10 +2521,10 @@ type-is@~1.6.17: media-typer "0.3.0" mime-types "~2.1.24" -typescript@3.5.3: - version "3.5.3" - resolved "https://registry.yarnpkg.com/typescript/-/typescript-3.5.3.tgz#c830f657f93f1ea846819e929092f5fe5983e977" - integrity sha512-ACzBtm/PhXBDId6a6sDJfroT2pOWt/oOnk4/dElG5G33ZL776N3Y6/6bKZJBFpd+b05F3Ct9qDjMeJmRWtE2/g== +typescript@4.4.2: + version "4.4.2" + resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.4.2.tgz#6d618640d430e3569a1dfb44f7d7e600ced3ee86" + integrity sha512-gzP+t5W4hdy4c+68bfcv0t400HVJMMd2+H9B7gae1nQlBzCqvrXX+6GL/b3GAgyTH966pzrZ70/fRjwAtZksSQ== ua-parser-js@^0.7.30: version "0.7.31"