chore: removed node_modules from repository
This commit is contained in:
parent
1d99937849
commit
c72b943e3e
1
node_modules/.bin/detect-libc
generated
vendored
1
node_modules/.bin/detect-libc
generated
vendored
@ -1 +0,0 @@
|
|||||||
../detect-libc/bin/detect-libc.js
|
|
||||||
1
node_modules/.bin/jiti
generated
vendored
1
node_modules/.bin/jiti
generated
vendored
@ -1 +0,0 @@
|
|||||||
../jiti/lib/jiti-cli.mjs
|
|
||||||
1
node_modules/.bin/mkdirp
generated
vendored
1
node_modules/.bin/mkdirp
generated
vendored
@ -1 +0,0 @@
|
|||||||
../mkdirp/dist/cjs/src/bin.js
|
|
||||||
1
node_modules/.bin/tailwindcss
generated
vendored
1
node_modules/.bin/tailwindcss
generated
vendored
@ -1 +0,0 @@
|
|||||||
../@tailwindcss/cli/dist/index.mjs
|
|
||||||
534
node_modules/.package-lock.json
generated
vendored
534
node_modules/.package-lock.json
generated
vendored
@ -1,534 +0,0 @@
|
|||||||
{
|
|
||||||
"name": "dx-portfolio",
|
|
||||||
"lockfileVersion": 3,
|
|
||||||
"requires": true,
|
|
||||||
"packages": {
|
|
||||||
"node_modules/@ampproject/remapping": {
|
|
||||||
"version": "2.3.0",
|
|
||||||
"resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.3.0.tgz",
|
|
||||||
"integrity": "sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw==",
|
|
||||||
"license": "Apache-2.0",
|
|
||||||
"dependencies": {
|
|
||||||
"@jridgewell/gen-mapping": "^0.3.5",
|
|
||||||
"@jridgewell/trace-mapping": "^0.3.24"
|
|
||||||
},
|
|
||||||
"engines": {
|
|
||||||
"node": ">=6.0.0"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/@isaacs/fs-minipass": {
|
|
||||||
"version": "4.0.1",
|
|
||||||
"resolved": "https://registry.npmjs.org/@isaacs/fs-minipass/-/fs-minipass-4.0.1.tgz",
|
|
||||||
"integrity": "sha512-wgm9Ehl2jpeqP3zw/7mo3kRHFp5MEDhqAdwy1fTGkHAwnkGOVsgpvQhL8B5n1qlb01jV3n/bI0ZfZp5lWA1k4w==",
|
|
||||||
"license": "ISC",
|
|
||||||
"dependencies": {
|
|
||||||
"minipass": "^7.0.4"
|
|
||||||
},
|
|
||||||
"engines": {
|
|
||||||
"node": ">=18.0.0"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/@jridgewell/gen-mapping": {
|
|
||||||
"version": "0.3.8",
|
|
||||||
"resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.8.tgz",
|
|
||||||
"integrity": "sha512-imAbBGkb+ebQyxKgzv5Hu2nmROxoDOXHh80evxdoXNOrvAnVx7zimzc1Oo5h9RlfV4vPXaE2iM5pOFbvOCClWA==",
|
|
||||||
"license": "MIT",
|
|
||||||
"dependencies": {
|
|
||||||
"@jridgewell/set-array": "^1.2.1",
|
|
||||||
"@jridgewell/sourcemap-codec": "^1.4.10",
|
|
||||||
"@jridgewell/trace-mapping": "^0.3.24"
|
|
||||||
},
|
|
||||||
"engines": {
|
|
||||||
"node": ">=6.0.0"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/@jridgewell/resolve-uri": {
|
|
||||||
"version": "3.1.2",
|
|
||||||
"resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz",
|
|
||||||
"integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==",
|
|
||||||
"license": "MIT",
|
|
||||||
"engines": {
|
|
||||||
"node": ">=6.0.0"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/@jridgewell/set-array": {
|
|
||||||
"version": "1.2.1",
|
|
||||||
"resolved": "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.2.1.tgz",
|
|
||||||
"integrity": "sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A==",
|
|
||||||
"license": "MIT",
|
|
||||||
"engines": {
|
|
||||||
"node": ">=6.0.0"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/@jridgewell/sourcemap-codec": {
|
|
||||||
"version": "1.5.0",
|
|
||||||
"resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.0.tgz",
|
|
||||||
"integrity": "sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ==",
|
|
||||||
"license": "MIT"
|
|
||||||
},
|
|
||||||
"node_modules/@jridgewell/trace-mapping": {
|
|
||||||
"version": "0.3.25",
|
|
||||||
"resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.25.tgz",
|
|
||||||
"integrity": "sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ==",
|
|
||||||
"license": "MIT",
|
|
||||||
"dependencies": {
|
|
||||||
"@jridgewell/resolve-uri": "^3.1.0",
|
|
||||||
"@jridgewell/sourcemap-codec": "^1.4.14"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/@parcel/watcher": {
|
|
||||||
"version": "2.5.1",
|
|
||||||
"resolved": "https://registry.npmjs.org/@parcel/watcher/-/watcher-2.5.1.tgz",
|
|
||||||
"integrity": "sha512-dfUnCxiN9H4ap84DvD2ubjw+3vUNpstxa0TneY/Paat8a3R4uQZDLSvWjmznAY/DoahqTHl9V46HF/Zs3F29pg==",
|
|
||||||
"hasInstallScript": true,
|
|
||||||
"license": "MIT",
|
|
||||||
"dependencies": {
|
|
||||||
"detect-libc": "^1.0.3",
|
|
||||||
"is-glob": "^4.0.3",
|
|
||||||
"micromatch": "^4.0.5",
|
|
||||||
"node-addon-api": "^7.0.0"
|
|
||||||
},
|
|
||||||
"engines": {
|
|
||||||
"node": ">= 10.0.0"
|
|
||||||
},
|
|
||||||
"funding": {
|
|
||||||
"type": "opencollective",
|
|
||||||
"url": "https://opencollective.com/parcel"
|
|
||||||
},
|
|
||||||
"optionalDependencies": {
|
|
||||||
"@parcel/watcher-android-arm64": "2.5.1",
|
|
||||||
"@parcel/watcher-darwin-arm64": "2.5.1",
|
|
||||||
"@parcel/watcher-darwin-x64": "2.5.1",
|
|
||||||
"@parcel/watcher-freebsd-x64": "2.5.1",
|
|
||||||
"@parcel/watcher-linux-arm-glibc": "2.5.1",
|
|
||||||
"@parcel/watcher-linux-arm-musl": "2.5.1",
|
|
||||||
"@parcel/watcher-linux-arm64-glibc": "2.5.1",
|
|
||||||
"@parcel/watcher-linux-arm64-musl": "2.5.1",
|
|
||||||
"@parcel/watcher-linux-x64-glibc": "2.5.1",
|
|
||||||
"@parcel/watcher-linux-x64-musl": "2.5.1",
|
|
||||||
"@parcel/watcher-win32-arm64": "2.5.1",
|
|
||||||
"@parcel/watcher-win32-ia32": "2.5.1",
|
|
||||||
"@parcel/watcher-win32-x64": "2.5.1"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/@parcel/watcher-darwin-arm64": {
|
|
||||||
"version": "2.5.1",
|
|
||||||
"resolved": "https://registry.npmjs.org/@parcel/watcher-darwin-arm64/-/watcher-darwin-arm64-2.5.1.tgz",
|
|
||||||
"integrity": "sha512-eAzPv5osDmZyBhou8PoF4i6RQXAfeKL9tjb3QzYuccXFMQU0ruIc/POh30ePnaOyD1UXdlKguHBmsTs53tVoPw==",
|
|
||||||
"cpu": [
|
|
||||||
"arm64"
|
|
||||||
],
|
|
||||||
"license": "MIT",
|
|
||||||
"optional": true,
|
|
||||||
"os": [
|
|
||||||
"darwin"
|
|
||||||
],
|
|
||||||
"engines": {
|
|
||||||
"node": ">= 10.0.0"
|
|
||||||
},
|
|
||||||
"funding": {
|
|
||||||
"type": "opencollective",
|
|
||||||
"url": "https://opencollective.com/parcel"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/@tailwindcss/cli": {
|
|
||||||
"version": "4.1.7",
|
|
||||||
"resolved": "https://registry.npmjs.org/@tailwindcss/cli/-/cli-4.1.7.tgz",
|
|
||||||
"integrity": "sha512-hJNjpov/UiJc9ZWH4j/eEQxqklADrD/71s+t8Y0wbyQVAwtLkSp+MeC/sHTb03X+28rfbe0fRXkiBsf73/IwPg==",
|
|
||||||
"license": "MIT",
|
|
||||||
"dependencies": {
|
|
||||||
"@parcel/watcher": "^2.5.1",
|
|
||||||
"@tailwindcss/node": "4.1.7",
|
|
||||||
"@tailwindcss/oxide": "4.1.7",
|
|
||||||
"enhanced-resolve": "^5.18.1",
|
|
||||||
"mri": "^1.2.0",
|
|
||||||
"picocolors": "^1.1.1",
|
|
||||||
"tailwindcss": "4.1.7"
|
|
||||||
},
|
|
||||||
"bin": {
|
|
||||||
"tailwindcss": "dist/index.mjs"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/@tailwindcss/node": {
|
|
||||||
"version": "4.1.7",
|
|
||||||
"resolved": "https://registry.npmjs.org/@tailwindcss/node/-/node-4.1.7.tgz",
|
|
||||||
"integrity": "sha512-9rsOpdY9idRI2NH6CL4wORFY0+Q6fnx9XP9Ju+iq/0wJwGD5IByIgFmwVbyy4ymuyprj8Qh4ErxMKTUL4uNh3g==",
|
|
||||||
"license": "MIT",
|
|
||||||
"dependencies": {
|
|
||||||
"@ampproject/remapping": "^2.3.0",
|
|
||||||
"enhanced-resolve": "^5.18.1",
|
|
||||||
"jiti": "^2.4.2",
|
|
||||||
"lightningcss": "1.30.1",
|
|
||||||
"magic-string": "^0.30.17",
|
|
||||||
"source-map-js": "^1.2.1",
|
|
||||||
"tailwindcss": "4.1.7"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/@tailwindcss/oxide": {
|
|
||||||
"version": "4.1.7",
|
|
||||||
"resolved": "https://registry.npmjs.org/@tailwindcss/oxide/-/oxide-4.1.7.tgz",
|
|
||||||
"integrity": "sha512-5SF95Ctm9DFiUyjUPnDGkoKItPX/k+xifcQhcqX5RA85m50jw1pT/KzjdvlqxRja45Y52nR4MR9fD1JYd7f8NQ==",
|
|
||||||
"hasInstallScript": true,
|
|
||||||
"license": "MIT",
|
|
||||||
"dependencies": {
|
|
||||||
"detect-libc": "^2.0.4",
|
|
||||||
"tar": "^7.4.3"
|
|
||||||
},
|
|
||||||
"engines": {
|
|
||||||
"node": ">= 10"
|
|
||||||
},
|
|
||||||
"optionalDependencies": {
|
|
||||||
"@tailwindcss/oxide-android-arm64": "4.1.7",
|
|
||||||
"@tailwindcss/oxide-darwin-arm64": "4.1.7",
|
|
||||||
"@tailwindcss/oxide-darwin-x64": "4.1.7",
|
|
||||||
"@tailwindcss/oxide-freebsd-x64": "4.1.7",
|
|
||||||
"@tailwindcss/oxide-linux-arm-gnueabihf": "4.1.7",
|
|
||||||
"@tailwindcss/oxide-linux-arm64-gnu": "4.1.7",
|
|
||||||
"@tailwindcss/oxide-linux-arm64-musl": "4.1.7",
|
|
||||||
"@tailwindcss/oxide-linux-x64-gnu": "4.1.7",
|
|
||||||
"@tailwindcss/oxide-linux-x64-musl": "4.1.7",
|
|
||||||
"@tailwindcss/oxide-wasm32-wasi": "4.1.7",
|
|
||||||
"@tailwindcss/oxide-win32-arm64-msvc": "4.1.7",
|
|
||||||
"@tailwindcss/oxide-win32-x64-msvc": "4.1.7"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/@tailwindcss/oxide-darwin-arm64": {
|
|
||||||
"version": "4.1.7",
|
|
||||||
"resolved": "https://registry.npmjs.org/@tailwindcss/oxide-darwin-arm64/-/oxide-darwin-arm64-4.1.7.tgz",
|
|
||||||
"integrity": "sha512-81jUw9To7fimGGkuJ2W5h3/oGonTOZKZ8C2ghm/TTxbwvfSiFSDPd6/A/KE2N7Jp4mv3Ps9OFqg2fEKgZFfsvg==",
|
|
||||||
"cpu": [
|
|
||||||
"arm64"
|
|
||||||
],
|
|
||||||
"license": "MIT",
|
|
||||||
"optional": true,
|
|
||||||
"os": [
|
|
||||||
"darwin"
|
|
||||||
],
|
|
||||||
"engines": {
|
|
||||||
"node": ">= 10"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/@tailwindcss/oxide/node_modules/detect-libc": {
|
|
||||||
"version": "2.0.4",
|
|
||||||
"resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.0.4.tgz",
|
|
||||||
"integrity": "sha512-3UDv+G9CsCKO1WKMGw9fwq/SWJYbI0c5Y7LU1AXYoDdbhE2AHQ6N6Nb34sG8Fj7T5APy8qXDCKuuIHd1BR0tVA==",
|
|
||||||
"license": "Apache-2.0",
|
|
||||||
"engines": {
|
|
||||||
"node": ">=8"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/braces": {
|
|
||||||
"version": "3.0.3",
|
|
||||||
"resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz",
|
|
||||||
"integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==",
|
|
||||||
"license": "MIT",
|
|
||||||
"dependencies": {
|
|
||||||
"fill-range": "^7.1.1"
|
|
||||||
},
|
|
||||||
"engines": {
|
|
||||||
"node": ">=8"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/chownr": {
|
|
||||||
"version": "3.0.0",
|
|
||||||
"resolved": "https://registry.npmjs.org/chownr/-/chownr-3.0.0.tgz",
|
|
||||||
"integrity": "sha512-+IxzY9BZOQd/XuYPRmrvEVjF/nqj5kgT4kEq7VofrDoM1MxoRjEWkrCC3EtLi59TVawxTAn+orJwFQcrqEN1+g==",
|
|
||||||
"license": "BlueOak-1.0.0",
|
|
||||||
"engines": {
|
|
||||||
"node": ">=18"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/detect-libc": {
|
|
||||||
"version": "1.0.3",
|
|
||||||
"resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-1.0.3.tgz",
|
|
||||||
"integrity": "sha512-pGjwhsmsp4kL2RTz08wcOlGN83otlqHeD/Z5T8GXZB+/YcpQ/dgo+lbU8ZsGxV0HIvqqxo9l7mqYwyYMD9bKDg==",
|
|
||||||
"license": "Apache-2.0",
|
|
||||||
"bin": {
|
|
||||||
"detect-libc": "bin/detect-libc.js"
|
|
||||||
},
|
|
||||||
"engines": {
|
|
||||||
"node": ">=0.10"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/enhanced-resolve": {
|
|
||||||
"version": "5.18.1",
|
|
||||||
"resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.18.1.tgz",
|
|
||||||
"integrity": "sha512-ZSW3ma5GkcQBIpwZTSRAI8N71Uuwgs93IezB7mf7R60tC8ZbJideoDNKjHn2O9KIlx6rkGTTEk1xUCK2E1Y2Yg==",
|
|
||||||
"license": "MIT",
|
|
||||||
"dependencies": {
|
|
||||||
"graceful-fs": "^4.2.4",
|
|
||||||
"tapable": "^2.2.0"
|
|
||||||
},
|
|
||||||
"engines": {
|
|
||||||
"node": ">=10.13.0"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/fill-range": {
|
|
||||||
"version": "7.1.1",
|
|
||||||
"resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz",
|
|
||||||
"integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==",
|
|
||||||
"license": "MIT",
|
|
||||||
"dependencies": {
|
|
||||||
"to-regex-range": "^5.0.1"
|
|
||||||
},
|
|
||||||
"engines": {
|
|
||||||
"node": ">=8"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/graceful-fs": {
|
|
||||||
"version": "4.2.11",
|
|
||||||
"resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz",
|
|
||||||
"integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==",
|
|
||||||
"license": "ISC"
|
|
||||||
},
|
|
||||||
"node_modules/is-extglob": {
|
|
||||||
"version": "2.1.1",
|
|
||||||
"resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz",
|
|
||||||
"integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==",
|
|
||||||
"license": "MIT",
|
|
||||||
"engines": {
|
|
||||||
"node": ">=0.10.0"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/is-glob": {
|
|
||||||
"version": "4.0.3",
|
|
||||||
"resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz",
|
|
||||||
"integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==",
|
|
||||||
"license": "MIT",
|
|
||||||
"dependencies": {
|
|
||||||
"is-extglob": "^2.1.1"
|
|
||||||
},
|
|
||||||
"engines": {
|
|
||||||
"node": ">=0.10.0"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/is-number": {
|
|
||||||
"version": "7.0.0",
|
|
||||||
"resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz",
|
|
||||||
"integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==",
|
|
||||||
"license": "MIT",
|
|
||||||
"engines": {
|
|
||||||
"node": ">=0.12.0"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/jiti": {
|
|
||||||
"version": "2.4.2",
|
|
||||||
"resolved": "https://registry.npmjs.org/jiti/-/jiti-2.4.2.tgz",
|
|
||||||
"integrity": "sha512-rg9zJN+G4n2nfJl5MW3BMygZX56zKPNVEYYqq7adpmMh4Jn2QNEwhvQlFy6jPVdcod7txZtKHWnyZiA3a0zP7A==",
|
|
||||||
"license": "MIT",
|
|
||||||
"bin": {
|
|
||||||
"jiti": "lib/jiti-cli.mjs"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/lightningcss": {
|
|
||||||
"version": "1.30.1",
|
|
||||||
"resolved": "https://registry.npmjs.org/lightningcss/-/lightningcss-1.30.1.tgz",
|
|
||||||
"integrity": "sha512-xi6IyHML+c9+Q3W0S4fCQJOym42pyurFiJUHEcEyHS0CeKzia4yZDEsLlqOFykxOdHpNy0NmvVO31vcSqAxJCg==",
|
|
||||||
"license": "MPL-2.0",
|
|
||||||
"dependencies": {
|
|
||||||
"detect-libc": "^2.0.3"
|
|
||||||
},
|
|
||||||
"engines": {
|
|
||||||
"node": ">= 12.0.0"
|
|
||||||
},
|
|
||||||
"funding": {
|
|
||||||
"type": "opencollective",
|
|
||||||
"url": "https://opencollective.com/parcel"
|
|
||||||
},
|
|
||||||
"optionalDependencies": {
|
|
||||||
"lightningcss-darwin-arm64": "1.30.1",
|
|
||||||
"lightningcss-darwin-x64": "1.30.1",
|
|
||||||
"lightningcss-freebsd-x64": "1.30.1",
|
|
||||||
"lightningcss-linux-arm-gnueabihf": "1.30.1",
|
|
||||||
"lightningcss-linux-arm64-gnu": "1.30.1",
|
|
||||||
"lightningcss-linux-arm64-musl": "1.30.1",
|
|
||||||
"lightningcss-linux-x64-gnu": "1.30.1",
|
|
||||||
"lightningcss-linux-x64-musl": "1.30.1",
|
|
||||||
"lightningcss-win32-arm64-msvc": "1.30.1",
|
|
||||||
"lightningcss-win32-x64-msvc": "1.30.1"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/lightningcss-darwin-arm64": {
|
|
||||||
"version": "1.30.1",
|
|
||||||
"resolved": "https://registry.npmjs.org/lightningcss-darwin-arm64/-/lightningcss-darwin-arm64-1.30.1.tgz",
|
|
||||||
"integrity": "sha512-c8JK7hyE65X1MHMN+Viq9n11RRC7hgin3HhYKhrMyaXflk5GVplZ60IxyoVtzILeKr+xAJwg6zK6sjTBJ0FKYQ==",
|
|
||||||
"cpu": [
|
|
||||||
"arm64"
|
|
||||||
],
|
|
||||||
"license": "MPL-2.0",
|
|
||||||
"optional": true,
|
|
||||||
"os": [
|
|
||||||
"darwin"
|
|
||||||
],
|
|
||||||
"engines": {
|
|
||||||
"node": ">= 12.0.0"
|
|
||||||
},
|
|
||||||
"funding": {
|
|
||||||
"type": "opencollective",
|
|
||||||
"url": "https://opencollective.com/parcel"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/lightningcss/node_modules/detect-libc": {
|
|
||||||
"version": "2.0.4",
|
|
||||||
"resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.0.4.tgz",
|
|
||||||
"integrity": "sha512-3UDv+G9CsCKO1WKMGw9fwq/SWJYbI0c5Y7LU1AXYoDdbhE2AHQ6N6Nb34sG8Fj7T5APy8qXDCKuuIHd1BR0tVA==",
|
|
||||||
"license": "Apache-2.0",
|
|
||||||
"engines": {
|
|
||||||
"node": ">=8"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/magic-string": {
|
|
||||||
"version": "0.30.17",
|
|
||||||
"resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.17.tgz",
|
|
||||||
"integrity": "sha512-sNPKHvyjVf7gyjwS4xGTaW/mCnF8wnjtifKBEhxfZ7E/S8tQ0rssrwGNn6q8JH/ohItJfSQp9mBtQYuTlH5QnA==",
|
|
||||||
"license": "MIT",
|
|
||||||
"dependencies": {
|
|
||||||
"@jridgewell/sourcemap-codec": "^1.5.0"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/micromatch": {
|
|
||||||
"version": "4.0.8",
|
|
||||||
"resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz",
|
|
||||||
"integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==",
|
|
||||||
"license": "MIT",
|
|
||||||
"dependencies": {
|
|
||||||
"braces": "^3.0.3",
|
|
||||||
"picomatch": "^2.3.1"
|
|
||||||
},
|
|
||||||
"engines": {
|
|
||||||
"node": ">=8.6"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/minipass": {
|
|
||||||
"version": "7.1.2",
|
|
||||||
"resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz",
|
|
||||||
"integrity": "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==",
|
|
||||||
"license": "ISC",
|
|
||||||
"engines": {
|
|
||||||
"node": ">=16 || 14 >=14.17"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/minizlib": {
|
|
||||||
"version": "3.0.2",
|
|
||||||
"resolved": "https://registry.npmjs.org/minizlib/-/minizlib-3.0.2.tgz",
|
|
||||||
"integrity": "sha512-oG62iEk+CYt5Xj2YqI5Xi9xWUeZhDI8jjQmC5oThVH5JGCTgIjr7ciJDzC7MBzYd//WvR1OTmP5Q38Q8ShQtVA==",
|
|
||||||
"license": "MIT",
|
|
||||||
"dependencies": {
|
|
||||||
"minipass": "^7.1.2"
|
|
||||||
},
|
|
||||||
"engines": {
|
|
||||||
"node": ">= 18"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/mkdirp": {
|
|
||||||
"version": "3.0.1",
|
|
||||||
"resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-3.0.1.tgz",
|
|
||||||
"integrity": "sha512-+NsyUUAZDmo6YVHzL/stxSu3t9YS1iljliy3BSDrXJ/dkn1KYdmtZODGGjLcc9XLgVVpH4KshHB8XmZgMhaBXg==",
|
|
||||||
"license": "MIT",
|
|
||||||
"bin": {
|
|
||||||
"mkdirp": "dist/cjs/src/bin.js"
|
|
||||||
},
|
|
||||||
"engines": {
|
|
||||||
"node": ">=10"
|
|
||||||
},
|
|
||||||
"funding": {
|
|
||||||
"url": "https://github.com/sponsors/isaacs"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/mri": {
|
|
||||||
"version": "1.2.0",
|
|
||||||
"resolved": "https://registry.npmjs.org/mri/-/mri-1.2.0.tgz",
|
|
||||||
"integrity": "sha512-tzzskb3bG8LvYGFF/mDTpq3jpI6Q9wc3LEmBaghu+DdCssd1FakN7Bc0hVNmEyGq1bq3RgfkCb3cmQLpNPOroA==",
|
|
||||||
"license": "MIT",
|
|
||||||
"engines": {
|
|
||||||
"node": ">=4"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/node-addon-api": {
|
|
||||||
"version": "7.1.1",
|
|
||||||
"resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-7.1.1.tgz",
|
|
||||||
"integrity": "sha512-5m3bsyrjFWE1xf7nz7YXdN4udnVtXK6/Yfgn5qnahL6bCkf2yKt4k3nuTKAtT4r3IG8JNR2ncsIMdZuAzJjHQQ==",
|
|
||||||
"license": "MIT"
|
|
||||||
},
|
|
||||||
"node_modules/picocolors": {
|
|
||||||
"version": "1.1.1",
|
|
||||||
"resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz",
|
|
||||||
"integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==",
|
|
||||||
"license": "ISC"
|
|
||||||
},
|
|
||||||
"node_modules/picomatch": {
|
|
||||||
"version": "2.3.1",
|
|
||||||
"resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz",
|
|
||||||
"integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==",
|
|
||||||
"license": "MIT",
|
|
||||||
"engines": {
|
|
||||||
"node": ">=8.6"
|
|
||||||
},
|
|
||||||
"funding": {
|
|
||||||
"url": "https://github.com/sponsors/jonschlinkert"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/source-map-js": {
|
|
||||||
"version": "1.2.1",
|
|
||||||
"resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz",
|
|
||||||
"integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==",
|
|
||||||
"license": "BSD-3-Clause",
|
|
||||||
"engines": {
|
|
||||||
"node": ">=0.10.0"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/tailwindcss": {
|
|
||||||
"version": "4.1.7",
|
|
||||||
"resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-4.1.7.tgz",
|
|
||||||
"integrity": "sha512-kr1o/ErIdNhTz8uzAYL7TpaUuzKIE6QPQ4qmSdxnoX/lo+5wmUHQA6h3L5yIqEImSRnAAURDirLu/BgiXGPAhg==",
|
|
||||||
"license": "MIT"
|
|
||||||
},
|
|
||||||
"node_modules/tapable": {
|
|
||||||
"version": "2.2.2",
|
|
||||||
"resolved": "https://registry.npmjs.org/tapable/-/tapable-2.2.2.tgz",
|
|
||||||
"integrity": "sha512-Re10+NauLTMCudc7T5WLFLAwDhQ0JWdrMK+9B2M8zR5hRExKmsRDCBA7/aV/pNJFltmBFO5BAMlQFi/vq3nKOg==",
|
|
||||||
"license": "MIT",
|
|
||||||
"engines": {
|
|
||||||
"node": ">=6"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/tar": {
|
|
||||||
"version": "7.4.3",
|
|
||||||
"resolved": "https://registry.npmjs.org/tar/-/tar-7.4.3.tgz",
|
|
||||||
"integrity": "sha512-5S7Va8hKfV7W5U6g3aYxXmlPoZVAwUMy9AOKyF2fVuZa2UD3qZjg578OrLRt8PcNN1PleVaL/5/yYATNL0ICUw==",
|
|
||||||
"license": "ISC",
|
|
||||||
"dependencies": {
|
|
||||||
"@isaacs/fs-minipass": "^4.0.0",
|
|
||||||
"chownr": "^3.0.0",
|
|
||||||
"minipass": "^7.1.2",
|
|
||||||
"minizlib": "^3.0.1",
|
|
||||||
"mkdirp": "^3.0.1",
|
|
||||||
"yallist": "^5.0.0"
|
|
||||||
},
|
|
||||||
"engines": {
|
|
||||||
"node": ">=18"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/to-regex-range": {
|
|
||||||
"version": "5.0.1",
|
|
||||||
"resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz",
|
|
||||||
"integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==",
|
|
||||||
"license": "MIT",
|
|
||||||
"dependencies": {
|
|
||||||
"is-number": "^7.0.0"
|
|
||||||
},
|
|
||||||
"engines": {
|
|
||||||
"node": ">=8.0"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"node_modules/yallist": {
|
|
||||||
"version": "5.0.0",
|
|
||||||
"resolved": "https://registry.npmjs.org/yallist/-/yallist-5.0.0.tgz",
|
|
||||||
"integrity": "sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw==",
|
|
||||||
"license": "BlueOak-1.0.0",
|
|
||||||
"engines": {
|
|
||||||
"node": ">=18"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
202
node_modules/@ampproject/remapping/LICENSE
generated
vendored
202
node_modules/@ampproject/remapping/LICENSE
generated
vendored
@ -1,202 +0,0 @@
|
|||||||
|
|
||||||
Apache License
|
|
||||||
Version 2.0, January 2004
|
|
||||||
http://www.apache.org/licenses/
|
|
||||||
|
|
||||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
|
||||||
|
|
||||||
1. Definitions.
|
|
||||||
|
|
||||||
"License" shall mean the terms and conditions for use, reproduction,
|
|
||||||
and distribution as defined by Sections 1 through 9 of this document.
|
|
||||||
|
|
||||||
"Licensor" shall mean the copyright owner or entity authorized by
|
|
||||||
the copyright owner that is granting the License.
|
|
||||||
|
|
||||||
"Legal Entity" shall mean the union of the acting entity and all
|
|
||||||
other entities that control, are controlled by, or are under common
|
|
||||||
control with that entity. For the purposes of this definition,
|
|
||||||
"control" means (i) the power, direct or indirect, to cause the
|
|
||||||
direction or management of such entity, whether by contract or
|
|
||||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
|
||||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
|
||||||
|
|
||||||
"You" (or "Your") shall mean an individual or Legal Entity
|
|
||||||
exercising permissions granted by this License.
|
|
||||||
|
|
||||||
"Source" form shall mean the preferred form for making modifications,
|
|
||||||
including but not limited to software source code, documentation
|
|
||||||
source, and configuration files.
|
|
||||||
|
|
||||||
"Object" form shall mean any form resulting from mechanical
|
|
||||||
transformation or translation of a Source form, including but
|
|
||||||
not limited to compiled object code, generated documentation,
|
|
||||||
and conversions to other media types.
|
|
||||||
|
|
||||||
"Work" shall mean the work of authorship, whether in Source or
|
|
||||||
Object form, made available under the License, as indicated by a
|
|
||||||
copyright notice that is included in or attached to the work
|
|
||||||
(an example is provided in the Appendix below).
|
|
||||||
|
|
||||||
"Derivative Works" shall mean any work, whether in Source or Object
|
|
||||||
form, that is based on (or derived from) the Work and for which the
|
|
||||||
editorial revisions, annotations, elaborations, or other modifications
|
|
||||||
represent, as a whole, an original work of authorship. For the purposes
|
|
||||||
of this License, Derivative Works shall not include works that remain
|
|
||||||
separable from, or merely link (or bind by name) to the interfaces of,
|
|
||||||
the Work and Derivative Works thereof.
|
|
||||||
|
|
||||||
"Contribution" shall mean any work of authorship, including
|
|
||||||
the original version of the Work and any modifications or additions
|
|
||||||
to that Work or Derivative Works thereof, that is intentionally
|
|
||||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
|
||||||
or by an individual or Legal Entity authorized to submit on behalf of
|
|
||||||
the copyright owner. For the purposes of this definition, "submitted"
|
|
||||||
means any form of electronic, verbal, or written communication sent
|
|
||||||
to the Licensor or its representatives, including but not limited to
|
|
||||||
communication on electronic mailing lists, source code control systems,
|
|
||||||
and issue tracking systems that are managed by, or on behalf of, the
|
|
||||||
Licensor for the purpose of discussing and improving the Work, but
|
|
||||||
excluding communication that is conspicuously marked or otherwise
|
|
||||||
designated in writing by the copyright owner as "Not a Contribution."
|
|
||||||
|
|
||||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
|
||||||
on behalf of whom a Contribution has been received by Licensor and
|
|
||||||
subsequently incorporated within the Work.
|
|
||||||
|
|
||||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
|
||||||
this License, each Contributor hereby grants to You a perpetual,
|
|
||||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
|
||||||
copyright license to reproduce, prepare Derivative Works of,
|
|
||||||
publicly display, publicly perform, sublicense, and distribute the
|
|
||||||
Work and such Derivative Works in Source or Object form.
|
|
||||||
|
|
||||||
3. Grant of Patent License. Subject to the terms and conditions of
|
|
||||||
this License, each Contributor hereby grants to You a perpetual,
|
|
||||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
|
||||||
(except as stated in this section) patent license to make, have made,
|
|
||||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
|
||||||
where such license applies only to those patent claims licensable
|
|
||||||
by such Contributor that are necessarily infringed by their
|
|
||||||
Contribution(s) alone or by combination of their Contribution(s)
|
|
||||||
with the Work to which such Contribution(s) was submitted. If You
|
|
||||||
institute patent litigation against any entity (including a
|
|
||||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
|
||||||
or a Contribution incorporated within the Work constitutes direct
|
|
||||||
or contributory patent infringement, then any patent licenses
|
|
||||||
granted to You under this License for that Work shall terminate
|
|
||||||
as of the date such litigation is filed.
|
|
||||||
|
|
||||||
4. Redistribution. You may reproduce and distribute copies of the
|
|
||||||
Work or Derivative Works thereof in any medium, with or without
|
|
||||||
modifications, and in Source or Object form, provided that You
|
|
||||||
meet the following conditions:
|
|
||||||
|
|
||||||
(a) You must give any other recipients of the Work or
|
|
||||||
Derivative Works a copy of this License; and
|
|
||||||
|
|
||||||
(b) You must cause any modified files to carry prominent notices
|
|
||||||
stating that You changed the files; and
|
|
||||||
|
|
||||||
(c) You must retain, in the Source form of any Derivative Works
|
|
||||||
that You distribute, all copyright, patent, trademark, and
|
|
||||||
attribution notices from the Source form of the Work,
|
|
||||||
excluding those notices that do not pertain to any part of
|
|
||||||
the Derivative Works; and
|
|
||||||
|
|
||||||
(d) If the Work includes a "NOTICE" text file as part of its
|
|
||||||
distribution, then any Derivative Works that You distribute must
|
|
||||||
include a readable copy of the attribution notices contained
|
|
||||||
within such NOTICE file, excluding those notices that do not
|
|
||||||
pertain to any part of the Derivative Works, in at least one
|
|
||||||
of the following places: within a NOTICE text file distributed
|
|
||||||
as part of the Derivative Works; within the Source form or
|
|
||||||
documentation, if provided along with the Derivative Works; or,
|
|
||||||
within a display generated by the Derivative Works, if and
|
|
||||||
wherever such third-party notices normally appear. The contents
|
|
||||||
of the NOTICE file are for informational purposes only and
|
|
||||||
do not modify the License. You may add Your own attribution
|
|
||||||
notices within Derivative Works that You distribute, alongside
|
|
||||||
or as an addendum to the NOTICE text from the Work, provided
|
|
||||||
that such additional attribution notices cannot be construed
|
|
||||||
as modifying the License.
|
|
||||||
|
|
||||||
You may add Your own copyright statement to Your modifications and
|
|
||||||
may provide additional or different license terms and conditions
|
|
||||||
for use, reproduction, or distribution of Your modifications, or
|
|
||||||
for any such Derivative Works as a whole, provided Your use,
|
|
||||||
reproduction, and distribution of the Work otherwise complies with
|
|
||||||
the conditions stated in this License.
|
|
||||||
|
|
||||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
|
||||||
any Contribution intentionally submitted for inclusion in the Work
|
|
||||||
by You to the Licensor shall be under the terms and conditions of
|
|
||||||
this License, without any additional terms or conditions.
|
|
||||||
Notwithstanding the above, nothing herein shall supersede or modify
|
|
||||||
the terms of any separate license agreement you may have executed
|
|
||||||
with Licensor regarding such Contributions.
|
|
||||||
|
|
||||||
6. Trademarks. This License does not grant permission to use the trade
|
|
||||||
names, trademarks, service marks, or product names of the Licensor,
|
|
||||||
except as required for reasonable and customary use in describing the
|
|
||||||
origin of the Work and reproducing the content of the NOTICE file.
|
|
||||||
|
|
||||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
|
||||||
agreed to in writing, Licensor provides the Work (and each
|
|
||||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
|
||||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
|
||||||
implied, including, without limitation, any warranties or conditions
|
|
||||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
|
||||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
|
||||||
appropriateness of using or redistributing the Work and assume any
|
|
||||||
risks associated with Your exercise of permissions under this License.
|
|
||||||
|
|
||||||
8. Limitation of Liability. In no event and under no legal theory,
|
|
||||||
whether in tort (including negligence), contract, or otherwise,
|
|
||||||
unless required by applicable law (such as deliberate and grossly
|
|
||||||
negligent acts) or agreed to in writing, shall any Contributor be
|
|
||||||
liable to You for damages, including any direct, indirect, special,
|
|
||||||
incidental, or consequential damages of any character arising as a
|
|
||||||
result of this License or out of the use or inability to use the
|
|
||||||
Work (including but not limited to damages for loss of goodwill,
|
|
||||||
work stoppage, computer failure or malfunction, or any and all
|
|
||||||
other commercial damages or losses), even if such Contributor
|
|
||||||
has been advised of the possibility of such damages.
|
|
||||||
|
|
||||||
9. Accepting Warranty or Additional Liability. While redistributing
|
|
||||||
the Work or Derivative Works thereof, You may choose to offer,
|
|
||||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
|
||||||
or other liability obligations and/or rights consistent with this
|
|
||||||
License. However, in accepting such obligations, You may act only
|
|
||||||
on Your own behalf and on Your sole responsibility, not on behalf
|
|
||||||
of any other Contributor, and only if You agree to indemnify,
|
|
||||||
defend, and hold each Contributor harmless for any liability
|
|
||||||
incurred by, or claims asserted against, such Contributor by reason
|
|
||||||
of your accepting any such warranty or additional liability.
|
|
||||||
|
|
||||||
END OF TERMS AND CONDITIONS
|
|
||||||
|
|
||||||
APPENDIX: How to apply the Apache License to your work.
|
|
||||||
|
|
||||||
To apply the Apache License to your work, attach the following
|
|
||||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
|
||||||
replaced with your own identifying information. (Don't include
|
|
||||||
the brackets!) The text should be enclosed in the appropriate
|
|
||||||
comment syntax for the file format. We also recommend that a
|
|
||||||
file or class name and description of purpose be included on the
|
|
||||||
same "printed page" as the copyright notice for easier
|
|
||||||
identification within third-party archives.
|
|
||||||
|
|
||||||
Copyright [yyyy] [name of copyright owner]
|
|
||||||
|
|
||||||
Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
you may not use this file except in compliance with the License.
|
|
||||||
You may obtain a copy of the License at
|
|
||||||
|
|
||||||
http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
|
|
||||||
Unless required by applicable law or agreed to in writing, software
|
|
||||||
distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
See the License for the specific language governing permissions and
|
|
||||||
limitations under the License.
|
|
||||||
218
node_modules/@ampproject/remapping/README.md
generated
vendored
218
node_modules/@ampproject/remapping/README.md
generated
vendored
@ -1,218 +0,0 @@
|
|||||||
# @ampproject/remapping
|
|
||||||
|
|
||||||
> Remap sequential sourcemaps through transformations to point at the original source code
|
|
||||||
|
|
||||||
Remapping allows you to take the sourcemaps generated through transforming your code and "remap"
|
|
||||||
them to the original source locations. Think "my minified code, transformed with babel and bundled
|
|
||||||
with webpack", all pointing to the correct location in your original source code.
|
|
||||||
|
|
||||||
With remapping, none of your source code transformations need to be aware of the input's sourcemap,
|
|
||||||
they only need to generate an output sourcemap. This greatly simplifies building custom
|
|
||||||
transformations (think a find-and-replace).
|
|
||||||
|
|
||||||
## Installation
|
|
||||||
|
|
||||||
```sh
|
|
||||||
npm install @ampproject/remapping
|
|
||||||
```
|
|
||||||
|
|
||||||
## Usage
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
function remapping(
|
|
||||||
map: SourceMap | SourceMap[],
|
|
||||||
loader: (file: string, ctx: LoaderContext) => (SourceMap | null | undefined),
|
|
||||||
options?: { excludeContent: boolean, decodedMappings: boolean }
|
|
||||||
): SourceMap;
|
|
||||||
|
|
||||||
// LoaderContext gives the loader the importing sourcemap, tree depth, the ability to override the
|
|
||||||
// "source" location (where child sources are resolved relative to, or the location of original
|
|
||||||
// source), and the ability to override the "content" of an original source for inclusion in the
|
|
||||||
// output sourcemap.
|
|
||||||
type LoaderContext = {
|
|
||||||
readonly importer: string;
|
|
||||||
readonly depth: number;
|
|
||||||
source: string;
|
|
||||||
content: string | null | undefined;
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
`remapping` takes the final output sourcemap, and a `loader` function. For every source file pointer
|
|
||||||
in the sourcemap, the `loader` will be called with the resolved path. If the path itself represents
|
|
||||||
a transformed file (it has a sourcmap associated with it), then the `loader` should return that
|
|
||||||
sourcemap. If not, the path will be treated as an original, untransformed source code.
|
|
||||||
|
|
||||||
```js
|
|
||||||
// Babel transformed "helloworld.js" into "transformed.js"
|
|
||||||
const transformedMap = JSON.stringify({
|
|
||||||
file: 'transformed.js',
|
|
||||||
// 1st column of 2nd line of output file translates into the 1st source
|
|
||||||
// file, line 3, column 2
|
|
||||||
mappings: ';CAEE',
|
|
||||||
sources: ['helloworld.js'],
|
|
||||||
version: 3,
|
|
||||||
});
|
|
||||||
|
|
||||||
// Uglify minified "transformed.js" into "transformed.min.js"
|
|
||||||
const minifiedTransformedMap = JSON.stringify({
|
|
||||||
file: 'transformed.min.js',
|
|
||||||
// 0th column of 1st line of output file translates into the 1st source
|
|
||||||
// file, line 2, column 1.
|
|
||||||
mappings: 'AACC',
|
|
||||||
names: [],
|
|
||||||
sources: ['transformed.js'],
|
|
||||||
version: 3,
|
|
||||||
});
|
|
||||||
|
|
||||||
const remapped = remapping(
|
|
||||||
minifiedTransformedMap,
|
|
||||||
(file, ctx) => {
|
|
||||||
|
|
||||||
// The "transformed.js" file is an transformed file.
|
|
||||||
if (file === 'transformed.js') {
|
|
||||||
// The root importer is empty.
|
|
||||||
console.assert(ctx.importer === '');
|
|
||||||
// The depth in the sourcemap tree we're currently loading.
|
|
||||||
// The root `minifiedTransformedMap` is depth 0, and its source children are depth 1, etc.
|
|
||||||
console.assert(ctx.depth === 1);
|
|
||||||
|
|
||||||
return transformedMap;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Loader will be called to load transformedMap's source file pointers as well.
|
|
||||||
console.assert(file === 'helloworld.js');
|
|
||||||
// `transformed.js`'s sourcemap points into `helloworld.js`.
|
|
||||||
console.assert(ctx.importer === 'transformed.js');
|
|
||||||
// This is a source child of `transformed`, which is a source child of `minifiedTransformedMap`.
|
|
||||||
console.assert(ctx.depth === 2);
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
);
|
|
||||||
|
|
||||||
console.log(remapped);
|
|
||||||
// {
|
|
||||||
// file: 'transpiled.min.js',
|
|
||||||
// mappings: 'AAEE',
|
|
||||||
// sources: ['helloworld.js'],
|
|
||||||
// version: 3,
|
|
||||||
// };
|
|
||||||
```
|
|
||||||
|
|
||||||
In this example, `loader` will be called twice:
|
|
||||||
|
|
||||||
1. `"transformed.js"`, the first source file pointer in the `minifiedTransformedMap`. We return the
|
|
||||||
associated sourcemap for it (its a transformed file, after all) so that sourcemap locations can
|
|
||||||
be traced through it into the source files it represents.
|
|
||||||
2. `"helloworld.js"`, our original, unmodified source code. This file does not have a sourcemap, so
|
|
||||||
we return `null`.
|
|
||||||
|
|
||||||
The `remapped` sourcemap now points from `transformed.min.js` into locations in `helloworld.js`. If
|
|
||||||
you were to read the `mappings`, it says "0th column of the first line output line points to the 1st
|
|
||||||
column of the 2nd line of the file `helloworld.js`".
|
|
||||||
|
|
||||||
### Multiple transformations of a file
|
|
||||||
|
|
||||||
As a convenience, if you have multiple single-source transformations of a file, you may pass an
|
|
||||||
array of sourcemap files in the order of most-recent transformation sourcemap first. Note that this
|
|
||||||
changes the `importer` and `depth` of each call to our loader. So our above example could have been
|
|
||||||
written as:
|
|
||||||
|
|
||||||
```js
|
|
||||||
const remapped = remapping(
|
|
||||||
[minifiedTransformedMap, transformedMap],
|
|
||||||
() => null
|
|
||||||
);
|
|
||||||
|
|
||||||
console.log(remapped);
|
|
||||||
// {
|
|
||||||
// file: 'transpiled.min.js',
|
|
||||||
// mappings: 'AAEE',
|
|
||||||
// sources: ['helloworld.js'],
|
|
||||||
// version: 3,
|
|
||||||
// };
|
|
||||||
```
|
|
||||||
|
|
||||||
### Advanced control of the loading graph
|
|
||||||
|
|
||||||
#### `source`
|
|
||||||
|
|
||||||
The `source` property can overridden to any value to change the location of the current load. Eg,
|
|
||||||
for an original source file, it allows us to change the location to the original source regardless
|
|
||||||
of what the sourcemap source entry says. And for transformed files, it allows us to change the
|
|
||||||
relative resolving location for child sources of the loaded sourcemap.
|
|
||||||
|
|
||||||
```js
|
|
||||||
const remapped = remapping(
|
|
||||||
minifiedTransformedMap,
|
|
||||||
(file, ctx) => {
|
|
||||||
|
|
||||||
if (file === 'transformed.js') {
|
|
||||||
// We pretend the transformed.js file actually exists in the 'src/' directory. When the nested
|
|
||||||
// source files are loaded, they will now be relative to `src/`.
|
|
||||||
ctx.source = 'src/transformed.js';
|
|
||||||
return transformedMap;
|
|
||||||
}
|
|
||||||
|
|
||||||
console.assert(file === 'src/helloworld.js');
|
|
||||||
// We could futher change the source of this original file, eg, to be inside a nested directory
|
|
||||||
// itself. This will be reflected in the remapped sourcemap.
|
|
||||||
ctx.source = 'src/nested/transformed.js';
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
);
|
|
||||||
|
|
||||||
console.log(remapped);
|
|
||||||
// {
|
|
||||||
// …,
|
|
||||||
// sources: ['src/nested/helloworld.js'],
|
|
||||||
// };
|
|
||||||
```
|
|
||||||
|
|
||||||
|
|
||||||
#### `content`
|
|
||||||
|
|
||||||
The `content` property can be overridden when we encounter an original source file. Eg, this allows
|
|
||||||
you to manually provide the source content of the original file regardless of whether the
|
|
||||||
`sourcesContent` field is present in the parent sourcemap. It can also be set to `null` to remove
|
|
||||||
the source content.
|
|
||||||
|
|
||||||
```js
|
|
||||||
const remapped = remapping(
|
|
||||||
minifiedTransformedMap,
|
|
||||||
(file, ctx) => {
|
|
||||||
|
|
||||||
if (file === 'transformed.js') {
|
|
||||||
// transformedMap does not include a `sourcesContent` field, so usually the remapped sourcemap
|
|
||||||
// would not include any `sourcesContent` values.
|
|
||||||
return transformedMap;
|
|
||||||
}
|
|
||||||
|
|
||||||
console.assert(file === 'helloworld.js');
|
|
||||||
// We can read the file to provide the source content.
|
|
||||||
ctx.content = fs.readFileSync(file, 'utf8');
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
);
|
|
||||||
|
|
||||||
console.log(remapped);
|
|
||||||
// {
|
|
||||||
// …,
|
|
||||||
// sourcesContent: [
|
|
||||||
// 'console.log("Hello world!")',
|
|
||||||
// ],
|
|
||||||
// };
|
|
||||||
```
|
|
||||||
|
|
||||||
### Options
|
|
||||||
|
|
||||||
#### excludeContent
|
|
||||||
|
|
||||||
By default, `excludeContent` is `false`. Passing `{ excludeContent: true }` will exclude the
|
|
||||||
`sourcesContent` field from the returned sourcemap. This is mainly useful when you want to reduce
|
|
||||||
the size out the sourcemap.
|
|
||||||
|
|
||||||
#### decodedMappings
|
|
||||||
|
|
||||||
By default, `decodedMappings` is `false`. Passing `{ decodedMappings: true }` will leave the
|
|
||||||
`mappings` field in a [decoded state](https://github.com/rich-harris/sourcemap-codec) instead of
|
|
||||||
encoding into a VLQ string.
|
|
||||||
197
node_modules/@ampproject/remapping/dist/remapping.mjs
generated
vendored
197
node_modules/@ampproject/remapping/dist/remapping.mjs
generated
vendored
@ -1,197 +0,0 @@
|
|||||||
import { decodedMappings, traceSegment, TraceMap } from '@jridgewell/trace-mapping';
|
|
||||||
import { GenMapping, maybeAddSegment, setSourceContent, setIgnore, toDecodedMap, toEncodedMap } from '@jridgewell/gen-mapping';
|
|
||||||
|
|
||||||
const SOURCELESS_MAPPING = /* #__PURE__ */ SegmentObject('', -1, -1, '', null, false);
|
|
||||||
const EMPTY_SOURCES = [];
|
|
||||||
function SegmentObject(source, line, column, name, content, ignore) {
|
|
||||||
return { source, line, column, name, content, ignore };
|
|
||||||
}
|
|
||||||
function Source(map, sources, source, content, ignore) {
|
|
||||||
return {
|
|
||||||
map,
|
|
||||||
sources,
|
|
||||||
source,
|
|
||||||
content,
|
|
||||||
ignore,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* MapSource represents a single sourcemap, with the ability to trace mappings into its child nodes
|
|
||||||
* (which may themselves be SourceMapTrees).
|
|
||||||
*/
|
|
||||||
function MapSource(map, sources) {
|
|
||||||
return Source(map, sources, '', null, false);
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* A "leaf" node in the sourcemap tree, representing an original, unmodified source file. Recursive
|
|
||||||
* segment tracing ends at the `OriginalSource`.
|
|
||||||
*/
|
|
||||||
function OriginalSource(source, content, ignore) {
|
|
||||||
return Source(null, EMPTY_SOURCES, source, content, ignore);
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* traceMappings is only called on the root level SourceMapTree, and begins the process of
|
|
||||||
* resolving each mapping in terms of the original source files.
|
|
||||||
*/
|
|
||||||
function traceMappings(tree) {
|
|
||||||
// TODO: Eventually support sourceRoot, which has to be removed because the sources are already
|
|
||||||
// fully resolved. We'll need to make sources relative to the sourceRoot before adding them.
|
|
||||||
const gen = new GenMapping({ file: tree.map.file });
|
|
||||||
const { sources: rootSources, map } = tree;
|
|
||||||
const rootNames = map.names;
|
|
||||||
const rootMappings = decodedMappings(map);
|
|
||||||
for (let i = 0; i < rootMappings.length; i++) {
|
|
||||||
const segments = rootMappings[i];
|
|
||||||
for (let j = 0; j < segments.length; j++) {
|
|
||||||
const segment = segments[j];
|
|
||||||
const genCol = segment[0];
|
|
||||||
let traced = SOURCELESS_MAPPING;
|
|
||||||
// 1-length segments only move the current generated column, there's no source information
|
|
||||||
// to gather from it.
|
|
||||||
if (segment.length !== 1) {
|
|
||||||
const source = rootSources[segment[1]];
|
|
||||||
traced = originalPositionFor(source, segment[2], segment[3], segment.length === 5 ? rootNames[segment[4]] : '');
|
|
||||||
// If the trace is invalid, then the trace ran into a sourcemap that doesn't contain a
|
|
||||||
// respective segment into an original source.
|
|
||||||
if (traced == null)
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
const { column, line, name, content, source, ignore } = traced;
|
|
||||||
maybeAddSegment(gen, i, genCol, source, line, column, name);
|
|
||||||
if (source && content != null)
|
|
||||||
setSourceContent(gen, source, content);
|
|
||||||
if (ignore)
|
|
||||||
setIgnore(gen, source, true);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return gen;
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* originalPositionFor is only called on children SourceMapTrees. It recurses down into its own
|
|
||||||
* child SourceMapTrees, until we find the original source map.
|
|
||||||
*/
|
|
||||||
function originalPositionFor(source, line, column, name) {
|
|
||||||
if (!source.map) {
|
|
||||||
return SegmentObject(source.source, line, column, name, source.content, source.ignore);
|
|
||||||
}
|
|
||||||
const segment = traceSegment(source.map, line, column);
|
|
||||||
// If we couldn't find a segment, then this doesn't exist in the sourcemap.
|
|
||||||
if (segment == null)
|
|
||||||
return null;
|
|
||||||
// 1-length segments only move the current generated column, there's no source information
|
|
||||||
// to gather from it.
|
|
||||||
if (segment.length === 1)
|
|
||||||
return SOURCELESS_MAPPING;
|
|
||||||
return originalPositionFor(source.sources[segment[1]], segment[2], segment[3], segment.length === 5 ? source.map.names[segment[4]] : name);
|
|
||||||
}
|
|
||||||
|
|
||||||
function asArray(value) {
|
|
||||||
if (Array.isArray(value))
|
|
||||||
return value;
|
|
||||||
return [value];
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Recursively builds a tree structure out of sourcemap files, with each node
|
|
||||||
* being either an `OriginalSource` "leaf" or a `SourceMapTree` composed of
|
|
||||||
* `OriginalSource`s and `SourceMapTree`s.
|
|
||||||
*
|
|
||||||
* Every sourcemap is composed of a collection of source files and mappings
|
|
||||||
* into locations of those source files. When we generate a `SourceMapTree` for
|
|
||||||
* the sourcemap, we attempt to load each source file's own sourcemap. If it
|
|
||||||
* does not have an associated sourcemap, it is considered an original,
|
|
||||||
* unmodified source file.
|
|
||||||
*/
|
|
||||||
function buildSourceMapTree(input, loader) {
|
|
||||||
const maps = asArray(input).map((m) => new TraceMap(m, ''));
|
|
||||||
const map = maps.pop();
|
|
||||||
for (let i = 0; i < maps.length; i++) {
|
|
||||||
if (maps[i].sources.length > 1) {
|
|
||||||
throw new Error(`Transformation map ${i} must have exactly one source file.\n` +
|
|
||||||
'Did you specify these with the most recent transformation maps first?');
|
|
||||||
}
|
|
||||||
}
|
|
||||||
let tree = build(map, loader, '', 0);
|
|
||||||
for (let i = maps.length - 1; i >= 0; i--) {
|
|
||||||
tree = MapSource(maps[i], [tree]);
|
|
||||||
}
|
|
||||||
return tree;
|
|
||||||
}
|
|
||||||
function build(map, loader, importer, importerDepth) {
|
|
||||||
const { resolvedSources, sourcesContent, ignoreList } = map;
|
|
||||||
const depth = importerDepth + 1;
|
|
||||||
const children = resolvedSources.map((sourceFile, i) => {
|
|
||||||
// The loading context gives the loader more information about why this file is being loaded
|
|
||||||
// (eg, from which importer). It also allows the loader to override the location of the loaded
|
|
||||||
// sourcemap/original source, or to override the content in the sourcesContent field if it's
|
|
||||||
// an unmodified source file.
|
|
||||||
const ctx = {
|
|
||||||
importer,
|
|
||||||
depth,
|
|
||||||
source: sourceFile || '',
|
|
||||||
content: undefined,
|
|
||||||
ignore: undefined,
|
|
||||||
};
|
|
||||||
// Use the provided loader callback to retrieve the file's sourcemap.
|
|
||||||
// TODO: We should eventually support async loading of sourcemap files.
|
|
||||||
const sourceMap = loader(ctx.source, ctx);
|
|
||||||
const { source, content, ignore } = ctx;
|
|
||||||
// If there is a sourcemap, then we need to recurse into it to load its source files.
|
|
||||||
if (sourceMap)
|
|
||||||
return build(new TraceMap(sourceMap, source), loader, source, depth);
|
|
||||||
// Else, it's an unmodified source file.
|
|
||||||
// The contents of this unmodified source file can be overridden via the loader context,
|
|
||||||
// allowing it to be explicitly null or a string. If it remains undefined, we fall back to
|
|
||||||
// the importing sourcemap's `sourcesContent` field.
|
|
||||||
const sourceContent = content !== undefined ? content : sourcesContent ? sourcesContent[i] : null;
|
|
||||||
const ignored = ignore !== undefined ? ignore : ignoreList ? ignoreList.includes(i) : false;
|
|
||||||
return OriginalSource(source, sourceContent, ignored);
|
|
||||||
});
|
|
||||||
return MapSource(map, children);
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* A SourceMap v3 compatible sourcemap, which only includes fields that were
|
|
||||||
* provided to it.
|
|
||||||
*/
|
|
||||||
class SourceMap {
|
|
||||||
constructor(map, options) {
|
|
||||||
const out = options.decodedMappings ? toDecodedMap(map) : toEncodedMap(map);
|
|
||||||
this.version = out.version; // SourceMap spec says this should be first.
|
|
||||||
this.file = out.file;
|
|
||||||
this.mappings = out.mappings;
|
|
||||||
this.names = out.names;
|
|
||||||
this.ignoreList = out.ignoreList;
|
|
||||||
this.sourceRoot = out.sourceRoot;
|
|
||||||
this.sources = out.sources;
|
|
||||||
if (!options.excludeContent) {
|
|
||||||
this.sourcesContent = out.sourcesContent;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
toString() {
|
|
||||||
return JSON.stringify(this);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Traces through all the mappings in the root sourcemap, through the sources
|
|
||||||
* (and their sourcemaps), all the way back to the original source location.
|
|
||||||
*
|
|
||||||
* `loader` will be called every time we encounter a source file. If it returns
|
|
||||||
* a sourcemap, we will recurse into that sourcemap to continue the trace. If
|
|
||||||
* it returns a falsey value, that source file is treated as an original,
|
|
||||||
* unmodified source file.
|
|
||||||
*
|
|
||||||
* Pass `excludeContent` to exclude any self-containing source file content
|
|
||||||
* from the output sourcemap.
|
|
||||||
*
|
|
||||||
* Pass `decodedMappings` to receive a SourceMap with decoded (instead of
|
|
||||||
* VLQ encoded) mappings.
|
|
||||||
*/
|
|
||||||
function remapping(input, loader, options) {
|
|
||||||
const opts = typeof options === 'object' ? options : { excludeContent: !!options, decodedMappings: false };
|
|
||||||
const tree = buildSourceMapTree(input, loader);
|
|
||||||
return new SourceMap(traceMappings(tree), opts);
|
|
||||||
}
|
|
||||||
|
|
||||||
export { remapping as default };
|
|
||||||
//# sourceMappingURL=remapping.mjs.map
|
|
||||||
1
node_modules/@ampproject/remapping/dist/remapping.mjs.map
generated
vendored
1
node_modules/@ampproject/remapping/dist/remapping.mjs.map
generated
vendored
File diff suppressed because one or more lines are too long
202
node_modules/@ampproject/remapping/dist/remapping.umd.js
generated
vendored
202
node_modules/@ampproject/remapping/dist/remapping.umd.js
generated
vendored
@ -1,202 +0,0 @@
|
|||||||
(function (global, factory) {
|
|
||||||
typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory(require('@jridgewell/trace-mapping'), require('@jridgewell/gen-mapping')) :
|
|
||||||
typeof define === 'function' && define.amd ? define(['@jridgewell/trace-mapping', '@jridgewell/gen-mapping'], factory) :
|
|
||||||
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, global.remapping = factory(global.traceMapping, global.genMapping));
|
|
||||||
})(this, (function (traceMapping, genMapping) { 'use strict';
|
|
||||||
|
|
||||||
const SOURCELESS_MAPPING = /* #__PURE__ */ SegmentObject('', -1, -1, '', null, false);
|
|
||||||
const EMPTY_SOURCES = [];
|
|
||||||
function SegmentObject(source, line, column, name, content, ignore) {
|
|
||||||
return { source, line, column, name, content, ignore };
|
|
||||||
}
|
|
||||||
function Source(map, sources, source, content, ignore) {
|
|
||||||
return {
|
|
||||||
map,
|
|
||||||
sources,
|
|
||||||
source,
|
|
||||||
content,
|
|
||||||
ignore,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* MapSource represents a single sourcemap, with the ability to trace mappings into its child nodes
|
|
||||||
* (which may themselves be SourceMapTrees).
|
|
||||||
*/
|
|
||||||
function MapSource(map, sources) {
|
|
||||||
return Source(map, sources, '', null, false);
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* A "leaf" node in the sourcemap tree, representing an original, unmodified source file. Recursive
|
|
||||||
* segment tracing ends at the `OriginalSource`.
|
|
||||||
*/
|
|
||||||
function OriginalSource(source, content, ignore) {
|
|
||||||
return Source(null, EMPTY_SOURCES, source, content, ignore);
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* traceMappings is only called on the root level SourceMapTree, and begins the process of
|
|
||||||
* resolving each mapping in terms of the original source files.
|
|
||||||
*/
|
|
||||||
function traceMappings(tree) {
|
|
||||||
// TODO: Eventually support sourceRoot, which has to be removed because the sources are already
|
|
||||||
// fully resolved. We'll need to make sources relative to the sourceRoot before adding them.
|
|
||||||
const gen = new genMapping.GenMapping({ file: tree.map.file });
|
|
||||||
const { sources: rootSources, map } = tree;
|
|
||||||
const rootNames = map.names;
|
|
||||||
const rootMappings = traceMapping.decodedMappings(map);
|
|
||||||
for (let i = 0; i < rootMappings.length; i++) {
|
|
||||||
const segments = rootMappings[i];
|
|
||||||
for (let j = 0; j < segments.length; j++) {
|
|
||||||
const segment = segments[j];
|
|
||||||
const genCol = segment[0];
|
|
||||||
let traced = SOURCELESS_MAPPING;
|
|
||||||
// 1-length segments only move the current generated column, there's no source information
|
|
||||||
// to gather from it.
|
|
||||||
if (segment.length !== 1) {
|
|
||||||
const source = rootSources[segment[1]];
|
|
||||||
traced = originalPositionFor(source, segment[2], segment[3], segment.length === 5 ? rootNames[segment[4]] : '');
|
|
||||||
// If the trace is invalid, then the trace ran into a sourcemap that doesn't contain a
|
|
||||||
// respective segment into an original source.
|
|
||||||
if (traced == null)
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
const { column, line, name, content, source, ignore } = traced;
|
|
||||||
genMapping.maybeAddSegment(gen, i, genCol, source, line, column, name);
|
|
||||||
if (source && content != null)
|
|
||||||
genMapping.setSourceContent(gen, source, content);
|
|
||||||
if (ignore)
|
|
||||||
genMapping.setIgnore(gen, source, true);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return gen;
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* originalPositionFor is only called on children SourceMapTrees. It recurses down into its own
|
|
||||||
* child SourceMapTrees, until we find the original source map.
|
|
||||||
*/
|
|
||||||
function originalPositionFor(source, line, column, name) {
|
|
||||||
if (!source.map) {
|
|
||||||
return SegmentObject(source.source, line, column, name, source.content, source.ignore);
|
|
||||||
}
|
|
||||||
const segment = traceMapping.traceSegment(source.map, line, column);
|
|
||||||
// If we couldn't find a segment, then this doesn't exist in the sourcemap.
|
|
||||||
if (segment == null)
|
|
||||||
return null;
|
|
||||||
// 1-length segments only move the current generated column, there's no source information
|
|
||||||
// to gather from it.
|
|
||||||
if (segment.length === 1)
|
|
||||||
return SOURCELESS_MAPPING;
|
|
||||||
return originalPositionFor(source.sources[segment[1]], segment[2], segment[3], segment.length === 5 ? source.map.names[segment[4]] : name);
|
|
||||||
}
|
|
||||||
|
|
||||||
function asArray(value) {
|
|
||||||
if (Array.isArray(value))
|
|
||||||
return value;
|
|
||||||
return [value];
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Recursively builds a tree structure out of sourcemap files, with each node
|
|
||||||
* being either an `OriginalSource` "leaf" or a `SourceMapTree` composed of
|
|
||||||
* `OriginalSource`s and `SourceMapTree`s.
|
|
||||||
*
|
|
||||||
* Every sourcemap is composed of a collection of source files and mappings
|
|
||||||
* into locations of those source files. When we generate a `SourceMapTree` for
|
|
||||||
* the sourcemap, we attempt to load each source file's own sourcemap. If it
|
|
||||||
* does not have an associated sourcemap, it is considered an original,
|
|
||||||
* unmodified source file.
|
|
||||||
*/
|
|
||||||
function buildSourceMapTree(input, loader) {
|
|
||||||
const maps = asArray(input).map((m) => new traceMapping.TraceMap(m, ''));
|
|
||||||
const map = maps.pop();
|
|
||||||
for (let i = 0; i < maps.length; i++) {
|
|
||||||
if (maps[i].sources.length > 1) {
|
|
||||||
throw new Error(`Transformation map ${i} must have exactly one source file.\n` +
|
|
||||||
'Did you specify these with the most recent transformation maps first?');
|
|
||||||
}
|
|
||||||
}
|
|
||||||
let tree = build(map, loader, '', 0);
|
|
||||||
for (let i = maps.length - 1; i >= 0; i--) {
|
|
||||||
tree = MapSource(maps[i], [tree]);
|
|
||||||
}
|
|
||||||
return tree;
|
|
||||||
}
|
|
||||||
function build(map, loader, importer, importerDepth) {
|
|
||||||
const { resolvedSources, sourcesContent, ignoreList } = map;
|
|
||||||
const depth = importerDepth + 1;
|
|
||||||
const children = resolvedSources.map((sourceFile, i) => {
|
|
||||||
// The loading context gives the loader more information about why this file is being loaded
|
|
||||||
// (eg, from which importer). It also allows the loader to override the location of the loaded
|
|
||||||
// sourcemap/original source, or to override the content in the sourcesContent field if it's
|
|
||||||
// an unmodified source file.
|
|
||||||
const ctx = {
|
|
||||||
importer,
|
|
||||||
depth,
|
|
||||||
source: sourceFile || '',
|
|
||||||
content: undefined,
|
|
||||||
ignore: undefined,
|
|
||||||
};
|
|
||||||
// Use the provided loader callback to retrieve the file's sourcemap.
|
|
||||||
// TODO: We should eventually support async loading of sourcemap files.
|
|
||||||
const sourceMap = loader(ctx.source, ctx);
|
|
||||||
const { source, content, ignore } = ctx;
|
|
||||||
// If there is a sourcemap, then we need to recurse into it to load its source files.
|
|
||||||
if (sourceMap)
|
|
||||||
return build(new traceMapping.TraceMap(sourceMap, source), loader, source, depth);
|
|
||||||
// Else, it's an unmodified source file.
|
|
||||||
// The contents of this unmodified source file can be overridden via the loader context,
|
|
||||||
// allowing it to be explicitly null or a string. If it remains undefined, we fall back to
|
|
||||||
// the importing sourcemap's `sourcesContent` field.
|
|
||||||
const sourceContent = content !== undefined ? content : sourcesContent ? sourcesContent[i] : null;
|
|
||||||
const ignored = ignore !== undefined ? ignore : ignoreList ? ignoreList.includes(i) : false;
|
|
||||||
return OriginalSource(source, sourceContent, ignored);
|
|
||||||
});
|
|
||||||
return MapSource(map, children);
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* A SourceMap v3 compatible sourcemap, which only includes fields that were
|
|
||||||
* provided to it.
|
|
||||||
*/
|
|
||||||
class SourceMap {
|
|
||||||
constructor(map, options) {
|
|
||||||
const out = options.decodedMappings ? genMapping.toDecodedMap(map) : genMapping.toEncodedMap(map);
|
|
||||||
this.version = out.version; // SourceMap spec says this should be first.
|
|
||||||
this.file = out.file;
|
|
||||||
this.mappings = out.mappings;
|
|
||||||
this.names = out.names;
|
|
||||||
this.ignoreList = out.ignoreList;
|
|
||||||
this.sourceRoot = out.sourceRoot;
|
|
||||||
this.sources = out.sources;
|
|
||||||
if (!options.excludeContent) {
|
|
||||||
this.sourcesContent = out.sourcesContent;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
toString() {
|
|
||||||
return JSON.stringify(this);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Traces through all the mappings in the root sourcemap, through the sources
|
|
||||||
* (and their sourcemaps), all the way back to the original source location.
|
|
||||||
*
|
|
||||||
* `loader` will be called every time we encounter a source file. If it returns
|
|
||||||
* a sourcemap, we will recurse into that sourcemap to continue the trace. If
|
|
||||||
* it returns a falsey value, that source file is treated as an original,
|
|
||||||
* unmodified source file.
|
|
||||||
*
|
|
||||||
* Pass `excludeContent` to exclude any self-containing source file content
|
|
||||||
* from the output sourcemap.
|
|
||||||
*
|
|
||||||
* Pass `decodedMappings` to receive a SourceMap with decoded (instead of
|
|
||||||
* VLQ encoded) mappings.
|
|
||||||
*/
|
|
||||||
function remapping(input, loader, options) {
|
|
||||||
const opts = typeof options === 'object' ? options : { excludeContent: !!options, decodedMappings: false };
|
|
||||||
const tree = buildSourceMapTree(input, loader);
|
|
||||||
return new SourceMap(traceMappings(tree), opts);
|
|
||||||
}
|
|
||||||
|
|
||||||
return remapping;
|
|
||||||
|
|
||||||
}));
|
|
||||||
//# sourceMappingURL=remapping.umd.js.map
|
|
||||||
1
node_modules/@ampproject/remapping/dist/remapping.umd.js.map
generated
vendored
1
node_modules/@ampproject/remapping/dist/remapping.umd.js.map
generated
vendored
File diff suppressed because one or more lines are too long
14
node_modules/@ampproject/remapping/dist/types/build-source-map-tree.d.ts
generated
vendored
14
node_modules/@ampproject/remapping/dist/types/build-source-map-tree.d.ts
generated
vendored
@ -1,14 +0,0 @@
|
|||||||
import type { MapSource as MapSourceType } from './source-map-tree';
|
|
||||||
import type { SourceMapInput, SourceMapLoader } from './types';
|
|
||||||
/**
|
|
||||||
* Recursively builds a tree structure out of sourcemap files, with each node
|
|
||||||
* being either an `OriginalSource` "leaf" or a `SourceMapTree` composed of
|
|
||||||
* `OriginalSource`s and `SourceMapTree`s.
|
|
||||||
*
|
|
||||||
* Every sourcemap is composed of a collection of source files and mappings
|
|
||||||
* into locations of those source files. When we generate a `SourceMapTree` for
|
|
||||||
* the sourcemap, we attempt to load each source file's own sourcemap. If it
|
|
||||||
* does not have an associated sourcemap, it is considered an original,
|
|
||||||
* unmodified source file.
|
|
||||||
*/
|
|
||||||
export default function buildSourceMapTree(input: SourceMapInput | SourceMapInput[], loader: SourceMapLoader): MapSourceType;
|
|
||||||
20
node_modules/@ampproject/remapping/dist/types/remapping.d.ts
generated
vendored
20
node_modules/@ampproject/remapping/dist/types/remapping.d.ts
generated
vendored
@ -1,20 +0,0 @@
|
|||||||
import SourceMap from './source-map';
|
|
||||||
import type { SourceMapInput, SourceMapLoader, Options } from './types';
|
|
||||||
export type { SourceMapSegment, EncodedSourceMap, EncodedSourceMap as RawSourceMap, DecodedSourceMap, SourceMapInput, SourceMapLoader, LoaderContext, Options, } from './types';
|
|
||||||
export type { SourceMap };
|
|
||||||
/**
|
|
||||||
* Traces through all the mappings in the root sourcemap, through the sources
|
|
||||||
* (and their sourcemaps), all the way back to the original source location.
|
|
||||||
*
|
|
||||||
* `loader` will be called every time we encounter a source file. If it returns
|
|
||||||
* a sourcemap, we will recurse into that sourcemap to continue the trace. If
|
|
||||||
* it returns a falsey value, that source file is treated as an original,
|
|
||||||
* unmodified source file.
|
|
||||||
*
|
|
||||||
* Pass `excludeContent` to exclude any self-containing source file content
|
|
||||||
* from the output sourcemap.
|
|
||||||
*
|
|
||||||
* Pass `decodedMappings` to receive a SourceMap with decoded (instead of
|
|
||||||
* VLQ encoded) mappings.
|
|
||||||
*/
|
|
||||||
export default function remapping(input: SourceMapInput | SourceMapInput[], loader: SourceMapLoader, options?: boolean | Options): SourceMap;
|
|
||||||
45
node_modules/@ampproject/remapping/dist/types/source-map-tree.d.ts
generated
vendored
45
node_modules/@ampproject/remapping/dist/types/source-map-tree.d.ts
generated
vendored
@ -1,45 +0,0 @@
|
|||||||
import { GenMapping } from '@jridgewell/gen-mapping';
|
|
||||||
import type { TraceMap } from '@jridgewell/trace-mapping';
|
|
||||||
export declare type SourceMapSegmentObject = {
|
|
||||||
column: number;
|
|
||||||
line: number;
|
|
||||||
name: string;
|
|
||||||
source: string;
|
|
||||||
content: string | null;
|
|
||||||
ignore: boolean;
|
|
||||||
};
|
|
||||||
export declare type OriginalSource = {
|
|
||||||
map: null;
|
|
||||||
sources: Sources[];
|
|
||||||
source: string;
|
|
||||||
content: string | null;
|
|
||||||
ignore: boolean;
|
|
||||||
};
|
|
||||||
export declare type MapSource = {
|
|
||||||
map: TraceMap;
|
|
||||||
sources: Sources[];
|
|
||||||
source: string;
|
|
||||||
content: null;
|
|
||||||
ignore: false;
|
|
||||||
};
|
|
||||||
export declare type Sources = OriginalSource | MapSource;
|
|
||||||
/**
|
|
||||||
* MapSource represents a single sourcemap, with the ability to trace mappings into its child nodes
|
|
||||||
* (which may themselves be SourceMapTrees).
|
|
||||||
*/
|
|
||||||
export declare function MapSource(map: TraceMap, sources: Sources[]): MapSource;
|
|
||||||
/**
|
|
||||||
* A "leaf" node in the sourcemap tree, representing an original, unmodified source file. Recursive
|
|
||||||
* segment tracing ends at the `OriginalSource`.
|
|
||||||
*/
|
|
||||||
export declare function OriginalSource(source: string, content: string | null, ignore: boolean): OriginalSource;
|
|
||||||
/**
|
|
||||||
* traceMappings is only called on the root level SourceMapTree, and begins the process of
|
|
||||||
* resolving each mapping in terms of the original source files.
|
|
||||||
*/
|
|
||||||
export declare function traceMappings(tree: MapSource): GenMapping;
|
|
||||||
/**
|
|
||||||
* originalPositionFor is only called on children SourceMapTrees. It recurses down into its own
|
|
||||||
* child SourceMapTrees, until we find the original source map.
|
|
||||||
*/
|
|
||||||
export declare function originalPositionFor(source: Sources, line: number, column: number, name: string): SourceMapSegmentObject | null;
|
|
||||||
18
node_modules/@ampproject/remapping/dist/types/source-map.d.ts
generated
vendored
18
node_modules/@ampproject/remapping/dist/types/source-map.d.ts
generated
vendored
@ -1,18 +0,0 @@
|
|||||||
import type { GenMapping } from '@jridgewell/gen-mapping';
|
|
||||||
import type { DecodedSourceMap, EncodedSourceMap, Options } from './types';
|
|
||||||
/**
|
|
||||||
* A SourceMap v3 compatible sourcemap, which only includes fields that were
|
|
||||||
* provided to it.
|
|
||||||
*/
|
|
||||||
export default class SourceMap {
|
|
||||||
file?: string | null;
|
|
||||||
mappings: EncodedSourceMap['mappings'] | DecodedSourceMap['mappings'];
|
|
||||||
sourceRoot?: string;
|
|
||||||
names: string[];
|
|
||||||
sources: (string | null)[];
|
|
||||||
sourcesContent?: (string | null)[];
|
|
||||||
version: 3;
|
|
||||||
ignoreList: number[] | undefined;
|
|
||||||
constructor(map: GenMapping, options: Options);
|
|
||||||
toString(): string;
|
|
||||||
}
|
|
||||||
15
node_modules/@ampproject/remapping/dist/types/types.d.ts
generated
vendored
15
node_modules/@ampproject/remapping/dist/types/types.d.ts
generated
vendored
@ -1,15 +0,0 @@
|
|||||||
import type { SourceMapInput } from '@jridgewell/trace-mapping';
|
|
||||||
export type { SourceMapSegment, DecodedSourceMap, EncodedSourceMap, } from '@jridgewell/trace-mapping';
|
|
||||||
export type { SourceMapInput };
|
|
||||||
export declare type LoaderContext = {
|
|
||||||
readonly importer: string;
|
|
||||||
readonly depth: number;
|
|
||||||
source: string;
|
|
||||||
content: string | null | undefined;
|
|
||||||
ignore: boolean | undefined;
|
|
||||||
};
|
|
||||||
export declare type SourceMapLoader = (file: string, ctx: LoaderContext) => SourceMapInput | null | undefined | void;
|
|
||||||
export declare type Options = {
|
|
||||||
excludeContent?: boolean;
|
|
||||||
decodedMappings?: boolean;
|
|
||||||
};
|
|
||||||
75
node_modules/@ampproject/remapping/package.json
generated
vendored
75
node_modules/@ampproject/remapping/package.json
generated
vendored
@ -1,75 +0,0 @@
|
|||||||
{
|
|
||||||
"name": "@ampproject/remapping",
|
|
||||||
"version": "2.3.0",
|
|
||||||
"description": "Remap sequential sourcemaps through transformations to point at the original source code",
|
|
||||||
"keywords": [
|
|
||||||
"source",
|
|
||||||
"map",
|
|
||||||
"remap"
|
|
||||||
],
|
|
||||||
"main": "dist/remapping.umd.js",
|
|
||||||
"module": "dist/remapping.mjs",
|
|
||||||
"types": "dist/types/remapping.d.ts",
|
|
||||||
"exports": {
|
|
||||||
".": [
|
|
||||||
{
|
|
||||||
"types": "./dist/types/remapping.d.ts",
|
|
||||||
"browser": "./dist/remapping.umd.js",
|
|
||||||
"require": "./dist/remapping.umd.js",
|
|
||||||
"import": "./dist/remapping.mjs"
|
|
||||||
},
|
|
||||||
"./dist/remapping.umd.js"
|
|
||||||
],
|
|
||||||
"./package.json": "./package.json"
|
|
||||||
},
|
|
||||||
"files": [
|
|
||||||
"dist"
|
|
||||||
],
|
|
||||||
"author": "Justin Ridgewell <jridgewell@google.com>",
|
|
||||||
"repository": {
|
|
||||||
"type": "git",
|
|
||||||
"url": "git+https://github.com/ampproject/remapping.git"
|
|
||||||
},
|
|
||||||
"license": "Apache-2.0",
|
|
||||||
"engines": {
|
|
||||||
"node": ">=6.0.0"
|
|
||||||
},
|
|
||||||
"scripts": {
|
|
||||||
"build": "run-s -n build:*",
|
|
||||||
"build:rollup": "rollup -c rollup.config.js",
|
|
||||||
"build:ts": "tsc --project tsconfig.build.json",
|
|
||||||
"lint": "run-s -n lint:*",
|
|
||||||
"lint:prettier": "npm run test:lint:prettier -- --write",
|
|
||||||
"lint:ts": "npm run test:lint:ts -- --fix",
|
|
||||||
"prebuild": "rm -rf dist",
|
|
||||||
"prepublishOnly": "npm run preversion",
|
|
||||||
"preversion": "run-s test build",
|
|
||||||
"test": "run-s -n test:lint test:only",
|
|
||||||
"test:debug": "node --inspect-brk node_modules/.bin/jest --runInBand",
|
|
||||||
"test:lint": "run-s -n test:lint:*",
|
|
||||||
"test:lint:prettier": "prettier --check '{src,test}/**/*.ts'",
|
|
||||||
"test:lint:ts": "eslint '{src,test}/**/*.ts'",
|
|
||||||
"test:only": "jest --coverage",
|
|
||||||
"test:watch": "jest --coverage --watch"
|
|
||||||
},
|
|
||||||
"devDependencies": {
|
|
||||||
"@rollup/plugin-typescript": "8.3.2",
|
|
||||||
"@types/jest": "27.4.1",
|
|
||||||
"@typescript-eslint/eslint-plugin": "5.20.0",
|
|
||||||
"@typescript-eslint/parser": "5.20.0",
|
|
||||||
"eslint": "8.14.0",
|
|
||||||
"eslint-config-prettier": "8.5.0",
|
|
||||||
"jest": "27.5.1",
|
|
||||||
"jest-config": "27.5.1",
|
|
||||||
"npm-run-all": "4.1.5",
|
|
||||||
"prettier": "2.6.2",
|
|
||||||
"rollup": "2.70.2",
|
|
||||||
"ts-jest": "27.1.4",
|
|
||||||
"tslib": "2.4.0",
|
|
||||||
"typescript": "4.6.3"
|
|
||||||
},
|
|
||||||
"dependencies": {
|
|
||||||
"@jridgewell/gen-mapping": "^0.3.5",
|
|
||||||
"@jridgewell/trace-mapping": "^0.3.24"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
15
node_modules/@isaacs/fs-minipass/LICENSE
generated
vendored
15
node_modules/@isaacs/fs-minipass/LICENSE
generated
vendored
@ -1,15 +0,0 @@
|
|||||||
The ISC License
|
|
||||||
|
|
||||||
Copyright (c) Isaac Z. Schlueter and Contributors
|
|
||||||
|
|
||||||
Permission to use, copy, modify, and/or distribute this software for any
|
|
||||||
purpose with or without fee is hereby granted, provided that the above
|
|
||||||
copyright notice and this permission notice appear in all copies.
|
|
||||||
|
|
||||||
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
|
||||||
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
|
||||||
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
|
||||||
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
|
||||||
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
|
||||||
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
|
|
||||||
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
|
||||||
71
node_modules/@isaacs/fs-minipass/README.md
generated
vendored
71
node_modules/@isaacs/fs-minipass/README.md
generated
vendored
@ -1,71 +0,0 @@
|
|||||||
# fs-minipass
|
|
||||||
|
|
||||||
Filesystem streams based on [minipass](http://npm.im/minipass).
|
|
||||||
|
|
||||||
4 classes are exported:
|
|
||||||
|
|
||||||
- ReadStream
|
|
||||||
- ReadStreamSync
|
|
||||||
- WriteStream
|
|
||||||
- WriteStreamSync
|
|
||||||
|
|
||||||
When using `ReadStreamSync`, all of the data is made available
|
|
||||||
immediately upon consuming the stream. Nothing is buffered in memory
|
|
||||||
when the stream is constructed. If the stream is piped to a writer,
|
|
||||||
then it will synchronously `read()` and emit data into the writer as
|
|
||||||
fast as the writer can consume it. (That is, it will respect
|
|
||||||
backpressure.) If you call `stream.read()` then it will read the
|
|
||||||
entire file and return the contents.
|
|
||||||
|
|
||||||
When using `WriteStreamSync`, every write is flushed to the file
|
|
||||||
synchronously. If your writes all come in a single tick, then it'll
|
|
||||||
write it all out in a single tick. It's as synchronous as you are.
|
|
||||||
|
|
||||||
The async versions work much like their node builtin counterparts,
|
|
||||||
with the exception of introducing significantly less Stream machinery
|
|
||||||
overhead.
|
|
||||||
|
|
||||||
## USAGE
|
|
||||||
|
|
||||||
It's just streams, you pipe them or read() them or write() to them.
|
|
||||||
|
|
||||||
```js
|
|
||||||
import { ReadStream, WriteStream } from 'fs-minipass'
|
|
||||||
// or: const { ReadStream, WriteStream } = require('fs-minipass')
|
|
||||||
const readStream = new ReadStream('file.txt')
|
|
||||||
const writeStream = new WriteStream('output.txt')
|
|
||||||
writeStream.write('some file header or whatever\n')
|
|
||||||
readStream.pipe(writeStream)
|
|
||||||
```
|
|
||||||
|
|
||||||
## ReadStream(path, options)
|
|
||||||
|
|
||||||
Path string is required, but somewhat irrelevant if an open file
|
|
||||||
descriptor is passed in as an option.
|
|
||||||
|
|
||||||
Options:
|
|
||||||
|
|
||||||
- `fd` Pass in a numeric file descriptor, if the file is already open.
|
|
||||||
- `readSize` The size of reads to do, defaults to 16MB
|
|
||||||
- `size` The size of the file, if known. Prevents zero-byte read()
|
|
||||||
call at the end.
|
|
||||||
- `autoClose` Set to `false` to prevent the file descriptor from being
|
|
||||||
closed when the file is done being read.
|
|
||||||
|
|
||||||
## WriteStream(path, options)
|
|
||||||
|
|
||||||
Path string is required, but somewhat irrelevant if an open file
|
|
||||||
descriptor is passed in as an option.
|
|
||||||
|
|
||||||
Options:
|
|
||||||
|
|
||||||
- `fd` Pass in a numeric file descriptor, if the file is already open.
|
|
||||||
- `mode` The mode to create the file with. Defaults to `0o666`.
|
|
||||||
- `start` The position in the file to start reading. If not
|
|
||||||
specified, then the file will start writing at position zero, and be
|
|
||||||
truncated by default.
|
|
||||||
- `autoClose` Set to `false` to prevent the file descriptor from being
|
|
||||||
closed when the stream is ended.
|
|
||||||
- `flags` Flags to use when opening the file. Irrelevant if `fd` is
|
|
||||||
passed in, since file won't be opened in that case. Defaults to
|
|
||||||
`'a'` if a `pos` is specified, or `'w'` otherwise.
|
|
||||||
118
node_modules/@isaacs/fs-minipass/dist/commonjs/index.d.ts
generated
vendored
118
node_modules/@isaacs/fs-minipass/dist/commonjs/index.d.ts
generated
vendored
@ -1,118 +0,0 @@
|
|||||||
/// <reference types="node" />
|
|
||||||
/// <reference types="node" />
|
|
||||||
/// <reference types="node" />
|
|
||||||
import EE from 'events';
|
|
||||||
import { Minipass } from 'minipass';
|
|
||||||
declare const _autoClose: unique symbol;
|
|
||||||
declare const _close: unique symbol;
|
|
||||||
declare const _ended: unique symbol;
|
|
||||||
declare const _fd: unique symbol;
|
|
||||||
declare const _finished: unique symbol;
|
|
||||||
declare const _flags: unique symbol;
|
|
||||||
declare const _flush: unique symbol;
|
|
||||||
declare const _handleChunk: unique symbol;
|
|
||||||
declare const _makeBuf: unique symbol;
|
|
||||||
declare const _mode: unique symbol;
|
|
||||||
declare const _needDrain: unique symbol;
|
|
||||||
declare const _onerror: unique symbol;
|
|
||||||
declare const _onopen: unique symbol;
|
|
||||||
declare const _onread: unique symbol;
|
|
||||||
declare const _onwrite: unique symbol;
|
|
||||||
declare const _open: unique symbol;
|
|
||||||
declare const _path: unique symbol;
|
|
||||||
declare const _pos: unique symbol;
|
|
||||||
declare const _queue: unique symbol;
|
|
||||||
declare const _read: unique symbol;
|
|
||||||
declare const _readSize: unique symbol;
|
|
||||||
declare const _reading: unique symbol;
|
|
||||||
declare const _remain: unique symbol;
|
|
||||||
declare const _size: unique symbol;
|
|
||||||
declare const _write: unique symbol;
|
|
||||||
declare const _writing: unique symbol;
|
|
||||||
declare const _defaultFlag: unique symbol;
|
|
||||||
declare const _errored: unique symbol;
|
|
||||||
export type ReadStreamOptions = Minipass.Options<Minipass.ContiguousData> & {
|
|
||||||
fd?: number;
|
|
||||||
readSize?: number;
|
|
||||||
size?: number;
|
|
||||||
autoClose?: boolean;
|
|
||||||
};
|
|
||||||
export type ReadStreamEvents = Minipass.Events<Minipass.ContiguousData> & {
|
|
||||||
open: [fd: number];
|
|
||||||
};
|
|
||||||
export declare class ReadStream extends Minipass<Minipass.ContiguousData, Buffer, ReadStreamEvents> {
|
|
||||||
[_errored]: boolean;
|
|
||||||
[_fd]?: number;
|
|
||||||
[_path]: string;
|
|
||||||
[_readSize]: number;
|
|
||||||
[_reading]: boolean;
|
|
||||||
[_size]: number;
|
|
||||||
[_remain]: number;
|
|
||||||
[_autoClose]: boolean;
|
|
||||||
constructor(path: string, opt: ReadStreamOptions);
|
|
||||||
get fd(): number | undefined;
|
|
||||||
get path(): string;
|
|
||||||
write(): void;
|
|
||||||
end(): void;
|
|
||||||
[_open](): void;
|
|
||||||
[_onopen](er?: NodeJS.ErrnoException | null, fd?: number): void;
|
|
||||||
[_makeBuf](): Buffer;
|
|
||||||
[_read](): void;
|
|
||||||
[_onread](er?: NodeJS.ErrnoException | null, br?: number, buf?: Buffer): void;
|
|
||||||
[_close](): void;
|
|
||||||
[_onerror](er: NodeJS.ErrnoException): void;
|
|
||||||
[_handleChunk](br: number, buf: Buffer): boolean;
|
|
||||||
emit<Event extends keyof ReadStreamEvents>(ev: Event, ...args: ReadStreamEvents[Event]): boolean;
|
|
||||||
}
|
|
||||||
export declare class ReadStreamSync extends ReadStream {
|
|
||||||
[_open](): void;
|
|
||||||
[_read](): void;
|
|
||||||
[_close](): void;
|
|
||||||
}
|
|
||||||
export type WriteStreamOptions = {
|
|
||||||
fd?: number;
|
|
||||||
autoClose?: boolean;
|
|
||||||
mode?: number;
|
|
||||||
captureRejections?: boolean;
|
|
||||||
start?: number;
|
|
||||||
flags?: string;
|
|
||||||
};
|
|
||||||
export declare class WriteStream extends EE {
|
|
||||||
readable: false;
|
|
||||||
writable: boolean;
|
|
||||||
[_errored]: boolean;
|
|
||||||
[_writing]: boolean;
|
|
||||||
[_ended]: boolean;
|
|
||||||
[_queue]: Buffer[];
|
|
||||||
[_needDrain]: boolean;
|
|
||||||
[_path]: string;
|
|
||||||
[_mode]: number;
|
|
||||||
[_autoClose]: boolean;
|
|
||||||
[_fd]?: number;
|
|
||||||
[_defaultFlag]: boolean;
|
|
||||||
[_flags]: string;
|
|
||||||
[_finished]: boolean;
|
|
||||||
[_pos]?: number;
|
|
||||||
constructor(path: string, opt: WriteStreamOptions);
|
|
||||||
emit(ev: string, ...args: any[]): boolean;
|
|
||||||
get fd(): number | undefined;
|
|
||||||
get path(): string;
|
|
||||||
[_onerror](er: NodeJS.ErrnoException): void;
|
|
||||||
[_open](): void;
|
|
||||||
[_onopen](er?: null | NodeJS.ErrnoException, fd?: number): void;
|
|
||||||
end(buf: string, enc?: BufferEncoding): this;
|
|
||||||
end(buf?: Buffer, enc?: undefined): this;
|
|
||||||
write(buf: string, enc?: BufferEncoding): boolean;
|
|
||||||
write(buf: Buffer, enc?: undefined): boolean;
|
|
||||||
[_write](buf: Buffer): void;
|
|
||||||
[_onwrite](er?: null | NodeJS.ErrnoException, bw?: number): void;
|
|
||||||
[_flush](): void;
|
|
||||||
[_close](): void;
|
|
||||||
}
|
|
||||||
export declare class WriteStreamSync extends WriteStream {
|
|
||||||
[_open](): void;
|
|
||||||
[_close](): void;
|
|
||||||
[_write](buf: Buffer): void;
|
|
||||||
}
|
|
||||||
export {};
|
|
||||||
//# sourceMappingURL=index.d.ts.map
|
|
||||||
1
node_modules/@isaacs/fs-minipass/dist/commonjs/index.d.ts.map
generated
vendored
1
node_modules/@isaacs/fs-minipass/dist/commonjs/index.d.ts.map
generated
vendored
@ -1 +0,0 @@
|
|||||||
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":";;;AAAA,OAAO,EAAE,MAAM,QAAQ,CAAA;AAEvB,OAAO,EAAE,QAAQ,EAAE,MAAM,UAAU,CAAA;AAInC,QAAA,MAAM,UAAU,eAAuB,CAAA;AACvC,QAAA,MAAM,MAAM,eAAmB,CAAA;AAC/B,QAAA,MAAM,MAAM,eAAmB,CAAA;AAC/B,QAAA,MAAM,GAAG,eAAgB,CAAA;AACzB,QAAA,MAAM,SAAS,eAAsB,CAAA;AACrC,QAAA,MAAM,MAAM,eAAmB,CAAA;AAC/B,QAAA,MAAM,MAAM,eAAmB,CAAA;AAC/B,QAAA,MAAM,YAAY,eAAyB,CAAA;AAC3C,QAAA,MAAM,QAAQ,eAAqB,CAAA;AACnC,QAAA,MAAM,KAAK,eAAkB,CAAA;AAC7B,QAAA,MAAM,UAAU,eAAuB,CAAA;AACvC,QAAA,MAAM,QAAQ,eAAqB,CAAA;AACnC,QAAA,MAAM,OAAO,eAAoB,CAAA;AACjC,QAAA,MAAM,OAAO,eAAoB,CAAA;AACjC,QAAA,MAAM,QAAQ,eAAqB,CAAA;AACnC,QAAA,MAAM,KAAK,eAAkB,CAAA;AAC7B,QAAA,MAAM,KAAK,eAAkB,CAAA;AAC7B,QAAA,MAAM,IAAI,eAAiB,CAAA;AAC3B,QAAA,MAAM,MAAM,eAAmB,CAAA;AAC/B,QAAA,MAAM,KAAK,eAAkB,CAAA;AAC7B,QAAA,MAAM,SAAS,eAAsB,CAAA;AACrC,QAAA,MAAM,QAAQ,eAAqB,CAAA;AACnC,QAAA,MAAM,OAAO,eAAoB,CAAA;AACjC,QAAA,MAAM,KAAK,eAAkB,CAAA;AAC7B,QAAA,MAAM,MAAM,eAAmB,CAAA;AAC/B,QAAA,MAAM,QAAQ,eAAqB,CAAA;AACnC,QAAA,MAAM,YAAY,eAAyB,CAAA;AAC3C,QAAA,MAAM,QAAQ,eAAqB,CAAA;AAEnC,MAAM,MAAM,iBAAiB,GAC3B,QAAQ,CAAC,OAAO,CAAC,QAAQ,CAAC,cAAc,CAAC,GAAG;IAC1C,EAAE,CAAC,EAAE,MAAM,CAAA;IACX,QAAQ,CAAC,EAAE,MAAM,CAAA;IACjB,IAAI,CAAC,EAAE,MAAM,CAAA;IACb,SAAS,CAAC,EAAE,OAAO,CAAA;CACpB,CAAA;AAEH,MAAM,MAAM,gBAAgB,GAAG,QAAQ,CAAC,MAAM,CAAC,QAAQ,CAAC,cAAc,CAAC,GAAG;IACxE,IAAI,EAAE,CAAC,EAAE,EAAE,MAAM,CAAC,CAAA;CACnB,CAAA;AAED,qBAAa,UAAW,SAAQ,QAAQ,CACtC,QAAQ,CAAC,cAAc,EACvB,MAAM,EACN,gBAAgB,CACjB;IACC,CAAC,QAAQ,CAAC,EAAE,OAAO,CAAS;IAC5B,CAAC,GAAG,CAAC,CAAC,EAAE,MAAM,CAAC;IACf,CAAC,KAAK,CAAC,EAAE,MAAM,CAAC;IAChB,CAAC,SAAS,CAAC,EAAE,MAAM,CAAC;IACpB,CAAC,QAAQ,CAAC,EAAE,OAAO,CAAS;IAC5B,CAAC,KAAK,CAAC,EAAE,MAAM,CAAC;IAChB,CAAC,OAAO,CAAC,EAAE,MAAM,CAAC;IAClB,CAAC,UAAU,CAAC,EAAE,OAAO,CAAA;gBAET,IAAI,EAAE,MAAM,EAAE,GAAG,EAAE,iBAAiB;IA4BhD,IAAI,EAAE,uBAEL;IAED,IAAI,IAAI,WAEP;IAGD,KAAK;IAKL,GAAG;IAIH,CAAC,KAAK,CAAC;IAIP,CAAC,OAAO,CAAC,CAAC,EAAE,CAAC,EAAE,MAAM,CAAC,cAAc,GAAG,IAAI,EAAE,EAAE,CAAC,EAAE,MAAM;IAUxD,
CAAC,QAAQ,CAAC;IAIV,CAAC,KAAK,CAAC;IAeP,CAAC,OAAO,CAAC,CAAC,EAAE,CAAC,EAAE,MAAM,CAAC,cAAc,GAAG,IAAI,EAAE,EAAE,CAAC,EAAE,MAAM,EAAE,GAAG,CAAC,EAAE,MAAM;IAStE,CAAC,MAAM,CAAC;IAUR,CAAC,QAAQ,CAAC,CAAC,EAAE,EAAE,MAAM,CAAC,cAAc;IAMpC,CAAC,YAAY,CAAC,CAAC,EAAE,EAAE,MAAM,EAAE,GAAG,EAAE,MAAM;IAiBtC,IAAI,CAAC,KAAK,SAAS,MAAM,gBAAgB,EACvC,EAAE,EAAE,KAAK,EACT,GAAG,IAAI,EAAE,gBAAgB,CAAC,KAAK,CAAC,GAC/B,OAAO;CAuBX;AAED,qBAAa,cAAe,SAAQ,UAAU;IAC5C,CAAC,KAAK,CAAC;IAYP,CAAC,KAAK,CAAC;IA2BP,CAAC,MAAM,CAAC;CAQT;AAED,MAAM,MAAM,kBAAkB,GAAG;IAC/B,EAAE,CAAC,EAAE,MAAM,CAAA;IACX,SAAS,CAAC,EAAE,OAAO,CAAA;IACnB,IAAI,CAAC,EAAE,MAAM,CAAA;IACb,iBAAiB,CAAC,EAAE,OAAO,CAAA;IAC3B,KAAK,CAAC,EAAE,MAAM,CAAA;IACd,KAAK,CAAC,EAAE,MAAM,CAAA;CACf,CAAA;AAED,qBAAa,WAAY,SAAQ,EAAE;IACjC,QAAQ,EAAE,KAAK,CAAQ;IACvB,QAAQ,EAAE,OAAO,CAAQ;IACzB,CAAC,QAAQ,CAAC,EAAE,OAAO,CAAS;IAC5B,CAAC,QAAQ,CAAC,EAAE,OAAO,CAAS;IAC5B,CAAC,MAAM,CAAC,EAAE,OAAO,CAAS;IAC1B,CAAC,MAAM,CAAC,EAAE,MAAM,EAAE,CAAM;IACxB,CAAC,UAAU,CAAC,EAAE,OAAO,CAAS;IAC9B,CAAC,KAAK,CAAC,EAAE,MAAM,CAAC;IAChB,CAAC,KAAK,CAAC,EAAE,MAAM,CAAC;IAChB,CAAC,UAAU,CAAC,EAAE,OAAO,CAAC;IACtB,CAAC,GAAG,CAAC,CAAC,EAAE,MAAM,CAAC;IACf,CAAC,YAAY,CAAC,EAAE,OAAO,CAAC;IACxB,CAAC,MAAM,CAAC,EAAE,MAAM,CAAC;IACjB,CAAC,SAAS,CAAC,EAAE,OAAO,CAAS;IAC7B,CAAC,IAAI,CAAC,CAAC,EAAE,MAAM,CAAA;gBAEH,IAAI,EAAE,MAAM,EAAE,GAAG,EAAE,kBAAkB;IAoBjD,IAAI,CAAC,EAAE,EAAE,MAAM,EAAE,GAAG,IAAI,EAAE,GAAG,EAAE;IAU/B,IAAI,EAAE,uBAEL;IAED,IAAI,IAAI,WAEP;IAED,CAAC,QAAQ,CAAC,CAAC,EAAE,EAAE,MAAM,CAAC,cAAc;IAMpC,CAAC,KAAK,CAAC;IAMP,CAAC,OAAO,CAAC,CAAC,EAAE,CAAC,EAAE,IAAI,GAAG,MAAM,CAAC,cAAc,EAAE,EAAE,CAAC,EAAE,MAAM;IAoBxD,GAAG,CAAC,GAAG,EAAE,MAAM,EAAE,GAAG,CAAC,EAAE,cAAc,GAAG,IAAI;IAC5C,GAAG,CAAC,GAAG,CAAC,EAAE,MAAM,EAAE,GAAG,CAAC,EAAE,SAAS,GAAG,IAAI;IAoBxC,KAAK,CAAC,GAAG,EAAE,MAAM,EAAE,GAAG,CAAC,EAAE,cAAc,GAAG,OAAO;IACjD,KAAK,CAAC,GAAG,EAAE,MAAM,EAAE,GAAG,CAAC,EAAE,SAAS,GAAG,OAAO;IAsB5C,CAAC,MAAM,CAAC,CAAC,GAAG,EAAE,MAAM;IAWpB,CAAC,QAAQ,CAAC,CAAC,EAAE,CAAC,EAAE,IAAI,GAAG,MAAM,CAAC,cAAc,EAAE,EAAE,CAAC,EAAE,MAAM;IAwBzD,CAAC,
MAAM,CAAC;IAgBR,CAAC,MAAM,CAAC;CAST;AAED,qBAAa,eAAgB,SAAQ,WAAW;IAC9C,CAAC,KAAK,CAAC,IAAI,IAAI;IAsBf,CAAC,MAAM,CAAC;IASR,CAAC,MAAM,CAAC,CAAC,GAAG,EAAE,MAAM;CAmBrB"}
|
|
||||||
430
node_modules/@isaacs/fs-minipass/dist/commonjs/index.js
generated
vendored
430
node_modules/@isaacs/fs-minipass/dist/commonjs/index.js
generated
vendored
@ -1,430 +0,0 @@
|
|||||||
"use strict";
|
|
||||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
||||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
||||||
};
|
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
|
||||||
exports.WriteStreamSync = exports.WriteStream = exports.ReadStreamSync = exports.ReadStream = void 0;
|
|
||||||
const events_1 = __importDefault(require("events"));
|
|
||||||
const fs_1 = __importDefault(require("fs"));
|
|
||||||
const minipass_1 = require("minipass");
|
|
||||||
const writev = fs_1.default.writev;
|
|
||||||
const _autoClose = Symbol('_autoClose');
|
|
||||||
const _close = Symbol('_close');
|
|
||||||
const _ended = Symbol('_ended');
|
|
||||||
const _fd = Symbol('_fd');
|
|
||||||
const _finished = Symbol('_finished');
|
|
||||||
const _flags = Symbol('_flags');
|
|
||||||
const _flush = Symbol('_flush');
|
|
||||||
const _handleChunk = Symbol('_handleChunk');
|
|
||||||
const _makeBuf = Symbol('_makeBuf');
|
|
||||||
const _mode = Symbol('_mode');
|
|
||||||
const _needDrain = Symbol('_needDrain');
|
|
||||||
const _onerror = Symbol('_onerror');
|
|
||||||
const _onopen = Symbol('_onopen');
|
|
||||||
const _onread = Symbol('_onread');
|
|
||||||
const _onwrite = Symbol('_onwrite');
|
|
||||||
const _open = Symbol('_open');
|
|
||||||
const _path = Symbol('_path');
|
|
||||||
const _pos = Symbol('_pos');
|
|
||||||
const _queue = Symbol('_queue');
|
|
||||||
const _read = Symbol('_read');
|
|
||||||
const _readSize = Symbol('_readSize');
|
|
||||||
const _reading = Symbol('_reading');
|
|
||||||
const _remain = Symbol('_remain');
|
|
||||||
const _size = Symbol('_size');
|
|
||||||
const _write = Symbol('_write');
|
|
||||||
const _writing = Symbol('_writing');
|
|
||||||
const _defaultFlag = Symbol('_defaultFlag');
|
|
||||||
const _errored = Symbol('_errored');
|
|
||||||
class ReadStream extends minipass_1.Minipass {
|
|
||||||
[_errored] = false;
|
|
||||||
[_fd];
|
|
||||||
[_path];
|
|
||||||
[_readSize];
|
|
||||||
[_reading] = false;
|
|
||||||
[_size];
|
|
||||||
[_remain];
|
|
||||||
[_autoClose];
|
|
||||||
constructor(path, opt) {
|
|
||||||
opt = opt || {};
|
|
||||||
super(opt);
|
|
||||||
this.readable = true;
|
|
||||||
this.writable = false;
|
|
||||||
if (typeof path !== 'string') {
|
|
||||||
throw new TypeError('path must be a string');
|
|
||||||
}
|
|
||||||
this[_errored] = false;
|
|
||||||
this[_fd] = typeof opt.fd === 'number' ? opt.fd : undefined;
|
|
||||||
this[_path] = path;
|
|
||||||
this[_readSize] = opt.readSize || 16 * 1024 * 1024;
|
|
||||||
this[_reading] = false;
|
|
||||||
this[_size] = typeof opt.size === 'number' ? opt.size : Infinity;
|
|
||||||
this[_remain] = this[_size];
|
|
||||||
this[_autoClose] =
|
|
||||||
typeof opt.autoClose === 'boolean' ? opt.autoClose : true;
|
|
||||||
if (typeof this[_fd] === 'number') {
|
|
||||||
this[_read]();
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
this[_open]();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
get fd() {
|
|
||||||
return this[_fd];
|
|
||||||
}
|
|
||||||
get path() {
|
|
||||||
return this[_path];
|
|
||||||
}
|
|
||||||
//@ts-ignore
|
|
||||||
write() {
|
|
||||||
throw new TypeError('this is a readable stream');
|
|
||||||
}
|
|
||||||
//@ts-ignore
|
|
||||||
end() {
|
|
||||||
throw new TypeError('this is a readable stream');
|
|
||||||
}
|
|
||||||
[_open]() {
|
|
||||||
fs_1.default.open(this[_path], 'r', (er, fd) => this[_onopen](er, fd));
|
|
||||||
}
|
|
||||||
[_onopen](er, fd) {
|
|
||||||
if (er) {
|
|
||||||
this[_onerror](er);
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
this[_fd] = fd;
|
|
||||||
this.emit('open', fd);
|
|
||||||
this[_read]();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
[_makeBuf]() {
|
|
||||||
return Buffer.allocUnsafe(Math.min(this[_readSize], this[_remain]));
|
|
||||||
}
|
|
||||||
[_read]() {
|
|
||||||
if (!this[_reading]) {
|
|
||||||
this[_reading] = true;
|
|
||||||
const buf = this[_makeBuf]();
|
|
||||||
/* c8 ignore start */
|
|
||||||
if (buf.length === 0) {
|
|
||||||
return process.nextTick(() => this[_onread](null, 0, buf));
|
|
||||||
}
|
|
||||||
/* c8 ignore stop */
|
|
||||||
fs_1.default.read(this[_fd], buf, 0, buf.length, null, (er, br, b) => this[_onread](er, br, b));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
[_onread](er, br, buf) {
|
|
||||||
this[_reading] = false;
|
|
||||||
if (er) {
|
|
||||||
this[_onerror](er);
|
|
||||||
}
|
|
||||||
else if (this[_handleChunk](br, buf)) {
|
|
||||||
this[_read]();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
[_close]() {
|
|
||||||
if (this[_autoClose] && typeof this[_fd] === 'number') {
|
|
||||||
const fd = this[_fd];
|
|
||||||
this[_fd] = undefined;
|
|
||||||
fs_1.default.close(fd, er => er ? this.emit('error', er) : this.emit('close'));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
[_onerror](er) {
|
|
||||||
this[_reading] = true;
|
|
||||||
this[_close]();
|
|
||||||
this.emit('error', er);
|
|
||||||
}
|
|
||||||
[_handleChunk](br, buf) {
|
|
||||||
let ret = false;
|
|
||||||
// no effect if infinite
|
|
||||||
this[_remain] -= br;
|
|
||||||
if (br > 0) {
|
|
||||||
ret = super.write(br < buf.length ? buf.subarray(0, br) : buf);
|
|
||||||
}
|
|
||||||
if (br === 0 || this[_remain] <= 0) {
|
|
||||||
ret = false;
|
|
||||||
this[_close]();
|
|
||||||
super.end();
|
|
||||||
}
|
|
||||||
return ret;
|
|
||||||
}
|
|
||||||
emit(ev, ...args) {
|
|
||||||
switch (ev) {
|
|
||||||
case 'prefinish':
|
|
||||||
case 'finish':
|
|
||||||
return false;
|
|
||||||
case 'drain':
|
|
||||||
if (typeof this[_fd] === 'number') {
|
|
||||||
this[_read]();
|
|
||||||
}
|
|
||||||
return false;
|
|
||||||
case 'error':
|
|
||||||
if (this[_errored]) {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
this[_errored] = true;
|
|
||||||
return super.emit(ev, ...args);
|
|
||||||
default:
|
|
||||||
return super.emit(ev, ...args);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
exports.ReadStream = ReadStream;
|
|
||||||
class ReadStreamSync extends ReadStream {
|
|
||||||
[_open]() {
|
|
||||||
let threw = true;
|
|
||||||
try {
|
|
||||||
this[_onopen](null, fs_1.default.openSync(this[_path], 'r'));
|
|
||||||
threw = false;
|
|
||||||
}
|
|
||||||
finally {
|
|
||||||
if (threw) {
|
|
||||||
this[_close]();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
[_read]() {
|
|
||||||
let threw = true;
|
|
||||||
try {
|
|
||||||
if (!this[_reading]) {
|
|
||||||
this[_reading] = true;
|
|
||||||
do {
|
|
||||||
const buf = this[_makeBuf]();
|
|
||||||
/* c8 ignore start */
|
|
||||||
const br = buf.length === 0
|
|
||||||
? 0
|
|
||||||
: fs_1.default.readSync(this[_fd], buf, 0, buf.length, null);
|
|
||||||
/* c8 ignore stop */
|
|
||||||
if (!this[_handleChunk](br, buf)) {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
} while (true);
|
|
||||||
this[_reading] = false;
|
|
||||||
}
|
|
||||||
threw = false;
|
|
||||||
}
|
|
||||||
finally {
|
|
||||||
if (threw) {
|
|
||||||
this[_close]();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
[_close]() {
|
|
||||||
if (this[_autoClose] && typeof this[_fd] === 'number') {
|
|
||||||
const fd = this[_fd];
|
|
||||||
this[_fd] = undefined;
|
|
||||||
fs_1.default.closeSync(fd);
|
|
||||||
this.emit('close');
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
exports.ReadStreamSync = ReadStreamSync;
|
|
||||||
class WriteStream extends events_1.default {
|
|
||||||
readable = false;
|
|
||||||
writable = true;
|
|
||||||
[_errored] = false;
|
|
||||||
[_writing] = false;
|
|
||||||
[_ended] = false;
|
|
||||||
[_queue] = [];
|
|
||||||
[_needDrain] = false;
|
|
||||||
[_path];
|
|
||||||
[_mode];
|
|
||||||
[_autoClose];
|
|
||||||
[_fd];
|
|
||||||
[_defaultFlag];
|
|
||||||
[_flags];
|
|
||||||
[_finished] = false;
|
|
||||||
[_pos];
|
|
||||||
constructor(path, opt) {
|
|
||||||
opt = opt || {};
|
|
||||||
super(opt);
|
|
||||||
this[_path] = path;
|
|
||||||
this[_fd] = typeof opt.fd === 'number' ? opt.fd : undefined;
|
|
||||||
this[_mode] = opt.mode === undefined ? 0o666 : opt.mode;
|
|
||||||
this[_pos] = typeof opt.start === 'number' ? opt.start : undefined;
|
|
||||||
this[_autoClose] =
|
|
||||||
typeof opt.autoClose === 'boolean' ? opt.autoClose : true;
|
|
||||||
// truncating makes no sense when writing into the middle
|
|
||||||
const defaultFlag = this[_pos] !== undefined ? 'r+' : 'w';
|
|
||||||
this[_defaultFlag] = opt.flags === undefined;
|
|
||||||
this[_flags] = opt.flags === undefined ? defaultFlag : opt.flags;
|
|
||||||
if (this[_fd] === undefined) {
|
|
||||||
this[_open]();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
emit(ev, ...args) {
|
|
||||||
if (ev === 'error') {
|
|
||||||
if (this[_errored]) {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
this[_errored] = true;
|
|
||||||
}
|
|
||||||
return super.emit(ev, ...args);
|
|
||||||
}
|
|
||||||
get fd() {
|
|
||||||
return this[_fd];
|
|
||||||
}
|
|
||||||
get path() {
|
|
||||||
return this[_path];
|
|
||||||
}
|
|
||||||
[_onerror](er) {
|
|
||||||
this[_close]();
|
|
||||||
this[_writing] = true;
|
|
||||||
this.emit('error', er);
|
|
||||||
}
|
|
||||||
[_open]() {
|
|
||||||
fs_1.default.open(this[_path], this[_flags], this[_mode], (er, fd) => this[_onopen](er, fd));
|
|
||||||
}
|
|
||||||
[_onopen](er, fd) {
|
|
||||||
if (this[_defaultFlag] &&
|
|
||||||
this[_flags] === 'r+' &&
|
|
||||||
er &&
|
|
||||||
er.code === 'ENOENT') {
|
|
||||||
this[_flags] = 'w';
|
|
||||||
this[_open]();
|
|
||||||
}
|
|
||||||
else if (er) {
|
|
||||||
this[_onerror](er);
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
this[_fd] = fd;
|
|
||||||
this.emit('open', fd);
|
|
||||||
if (!this[_writing]) {
|
|
||||||
this[_flush]();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
end(buf, enc) {
|
|
||||||
if (buf) {
|
|
||||||
//@ts-ignore
|
|
||||||
this.write(buf, enc);
|
|
||||||
}
|
|
||||||
this[_ended] = true;
|
|
||||||
// synthetic after-write logic, where drain/finish live
|
|
||||||
if (!this[_writing] &&
|
|
||||||
!this[_queue].length &&
|
|
||||||
typeof this[_fd] === 'number') {
|
|
||||||
this[_onwrite](null, 0);
|
|
||||||
}
|
|
||||||
return this;
|
|
||||||
}
|
|
||||||
write(buf, enc) {
|
|
||||||
if (typeof buf === 'string') {
|
|
||||||
buf = Buffer.from(buf, enc);
|
|
||||||
}
|
|
||||||
if (this[_ended]) {
|
|
||||||
this.emit('error', new Error('write() after end()'));
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
if (this[_fd] === undefined || this[_writing] || this[_queue].length) {
|
|
||||||
this[_queue].push(buf);
|
|
||||||
this[_needDrain] = true;
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
this[_writing] = true;
|
|
||||||
this[_write](buf);
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
[_write](buf) {
|
|
||||||
fs_1.default.write(this[_fd], buf, 0, buf.length, this[_pos], (er, bw) => this[_onwrite](er, bw));
|
|
||||||
}
|
|
||||||
[_onwrite](er, bw) {
|
|
||||||
if (er) {
|
|
||||||
this[_onerror](er);
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
if (this[_pos] !== undefined && typeof bw === 'number') {
|
|
||||||
this[_pos] += bw;
|
|
||||||
}
|
|
||||||
if (this[_queue].length) {
|
|
||||||
this[_flush]();
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
this[_writing] = false;
|
|
||||||
if (this[_ended] && !this[_finished]) {
|
|
||||||
this[_finished] = true;
|
|
||||||
this[_close]();
|
|
||||||
this.emit('finish');
|
|
||||||
}
|
|
||||||
else if (this[_needDrain]) {
|
|
||||||
this[_needDrain] = false;
|
|
||||||
this.emit('drain');
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
[_flush]() {
|
|
||||||
if (this[_queue].length === 0) {
|
|
||||||
if (this[_ended]) {
|
|
||||||
this[_onwrite](null, 0);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
else if (this[_queue].length === 1) {
|
|
||||||
this[_write](this[_queue].pop());
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
const iovec = this[_queue];
|
|
||||||
this[_queue] = [];
|
|
||||||
writev(this[_fd], iovec, this[_pos], (er, bw) => this[_onwrite](er, bw));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
[_close]() {
|
|
||||||
if (this[_autoClose] && typeof this[_fd] === 'number') {
|
|
||||||
const fd = this[_fd];
|
|
||||||
this[_fd] = undefined;
|
|
||||||
fs_1.default.close(fd, er => er ? this.emit('error', er) : this.emit('close'));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
exports.WriteStream = WriteStream;
|
|
||||||
class WriteStreamSync extends WriteStream {
|
|
||||||
[_open]() {
|
|
||||||
let fd;
|
|
||||||
// only wrap in a try{} block if we know we'll retry, to avoid
|
|
||||||
// the rethrow obscuring the error's source frame in most cases.
|
|
||||||
if (this[_defaultFlag] && this[_flags] === 'r+') {
|
|
||||||
try {
|
|
||||||
fd = fs_1.default.openSync(this[_path], this[_flags], this[_mode]);
|
|
||||||
}
|
|
||||||
catch (er) {
|
|
||||||
if (er?.code === 'ENOENT') {
|
|
||||||
this[_flags] = 'w';
|
|
||||||
return this[_open]();
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
throw er;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
fd = fs_1.default.openSync(this[_path], this[_flags], this[_mode]);
|
|
||||||
}
|
|
||||||
this[_onopen](null, fd);
|
|
||||||
}
|
|
||||||
[_close]() {
|
|
||||||
if (this[_autoClose] && typeof this[_fd] === 'number') {
|
|
||||||
const fd = this[_fd];
|
|
||||||
this[_fd] = undefined;
|
|
||||||
fs_1.default.closeSync(fd);
|
|
||||||
this.emit('close');
|
|
||||||
}
|
|
||||||
}
|
|
||||||
[_write](buf) {
|
|
||||||
// throw the original, but try to close if it fails
|
|
||||||
let threw = true;
|
|
||||||
try {
|
|
||||||
this[_onwrite](null, fs_1.default.writeSync(this[_fd], buf, 0, buf.length, this[_pos]));
|
|
||||||
threw = false;
|
|
||||||
}
|
|
||||||
finally {
|
|
||||||
if (threw) {
|
|
||||||
try {
|
|
||||||
this[_close]();
|
|
||||||
}
|
|
||||||
catch {
|
|
||||||
// ok error
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
exports.WriteStreamSync = WriteStreamSync;
|
|
||||||
//# sourceMappingURL=index.js.map
|
|
||||||
1
node_modules/@isaacs/fs-minipass/dist/commonjs/index.js.map
generated
vendored
1
node_modules/@isaacs/fs-minipass/dist/commonjs/index.js.map
generated
vendored
File diff suppressed because one or more lines are too long
3
node_modules/@isaacs/fs-minipass/dist/commonjs/package.json
generated
vendored
3
node_modules/@isaacs/fs-minipass/dist/commonjs/package.json
generated
vendored
@ -1,3 +0,0 @@
|
|||||||
{
|
|
||||||
"type": "commonjs"
|
|
||||||
}
|
|
||||||
118
node_modules/@isaacs/fs-minipass/dist/esm/index.d.ts
generated
vendored
118
node_modules/@isaacs/fs-minipass/dist/esm/index.d.ts
generated
vendored
@ -1,118 +0,0 @@
|
|||||||
/// <reference types="node" resolution-mode="require"/>
|
|
||||||
/// <reference types="node" resolution-mode="require"/>
|
|
||||||
/// <reference types="node" resolution-mode="require"/>
|
|
||||||
import EE from 'events';
|
|
||||||
import { Minipass } from 'minipass';
|
|
||||||
declare const _autoClose: unique symbol;
|
|
||||||
declare const _close: unique symbol;
|
|
||||||
declare const _ended: unique symbol;
|
|
||||||
declare const _fd: unique symbol;
|
|
||||||
declare const _finished: unique symbol;
|
|
||||||
declare const _flags: unique symbol;
|
|
||||||
declare const _flush: unique symbol;
|
|
||||||
declare const _handleChunk: unique symbol;
|
|
||||||
declare const _makeBuf: unique symbol;
|
|
||||||
declare const _mode: unique symbol;
|
|
||||||
declare const _needDrain: unique symbol;
|
|
||||||
declare const _onerror: unique symbol;
|
|
||||||
declare const _onopen: unique symbol;
|
|
||||||
declare const _onread: unique symbol;
|
|
||||||
declare const _onwrite: unique symbol;
|
|
||||||
declare const _open: unique symbol;
|
|
||||||
declare const _path: unique symbol;
|
|
||||||
declare const _pos: unique symbol;
|
|
||||||
declare const _queue: unique symbol;
|
|
||||||
declare const _read: unique symbol;
|
|
||||||
declare const _readSize: unique symbol;
|
|
||||||
declare const _reading: unique symbol;
|
|
||||||
declare const _remain: unique symbol;
|
|
||||||
declare const _size: unique symbol;
|
|
||||||
declare const _write: unique symbol;
|
|
||||||
declare const _writing: unique symbol;
|
|
||||||
declare const _defaultFlag: unique symbol;
|
|
||||||
declare const _errored: unique symbol;
|
|
||||||
export type ReadStreamOptions = Minipass.Options<Minipass.ContiguousData> & {
|
|
||||||
fd?: number;
|
|
||||||
readSize?: number;
|
|
||||||
size?: number;
|
|
||||||
autoClose?: boolean;
|
|
||||||
};
|
|
||||||
export type ReadStreamEvents = Minipass.Events<Minipass.ContiguousData> & {
|
|
||||||
open: [fd: number];
|
|
||||||
};
|
|
||||||
export declare class ReadStream extends Minipass<Minipass.ContiguousData, Buffer, ReadStreamEvents> {
|
|
||||||
[_errored]: boolean;
|
|
||||||
[_fd]?: number;
|
|
||||||
[_path]: string;
|
|
||||||
[_readSize]: number;
|
|
||||||
[_reading]: boolean;
|
|
||||||
[_size]: number;
|
|
||||||
[_remain]: number;
|
|
||||||
[_autoClose]: boolean;
|
|
||||||
constructor(path: string, opt: ReadStreamOptions);
|
|
||||||
get fd(): number | undefined;
|
|
||||||
get path(): string;
|
|
||||||
write(): void;
|
|
||||||
end(): void;
|
|
||||||
[_open](): void;
|
|
||||||
[_onopen](er?: NodeJS.ErrnoException | null, fd?: number): void;
|
|
||||||
[_makeBuf](): Buffer;
|
|
||||||
[_read](): void;
|
|
||||||
[_onread](er?: NodeJS.ErrnoException | null, br?: number, buf?: Buffer): void;
|
|
||||||
[_close](): void;
|
|
||||||
[_onerror](er: NodeJS.ErrnoException): void;
|
|
||||||
[_handleChunk](br: number, buf: Buffer): boolean;
|
|
||||||
emit<Event extends keyof ReadStreamEvents>(ev: Event, ...args: ReadStreamEvents[Event]): boolean;
|
|
||||||
}
|
|
||||||
export declare class ReadStreamSync extends ReadStream {
|
|
||||||
[_open](): void;
|
|
||||||
[_read](): void;
|
|
||||||
[_close](): void;
|
|
||||||
}
|
|
||||||
export type WriteStreamOptions = {
|
|
||||||
fd?: number;
|
|
||||||
autoClose?: boolean;
|
|
||||||
mode?: number;
|
|
||||||
captureRejections?: boolean;
|
|
||||||
start?: number;
|
|
||||||
flags?: string;
|
|
||||||
};
|
|
||||||
export declare class WriteStream extends EE {
|
|
||||||
readable: false;
|
|
||||||
writable: boolean;
|
|
||||||
[_errored]: boolean;
|
|
||||||
[_writing]: boolean;
|
|
||||||
[_ended]: boolean;
|
|
||||||
[_queue]: Buffer[];
|
|
||||||
[_needDrain]: boolean;
|
|
||||||
[_path]: string;
|
|
||||||
[_mode]: number;
|
|
||||||
[_autoClose]: boolean;
|
|
||||||
[_fd]?: number;
|
|
||||||
[_defaultFlag]: boolean;
|
|
||||||
[_flags]: string;
|
|
||||||
[_finished]: boolean;
|
|
||||||
[_pos]?: number;
|
|
||||||
constructor(path: string, opt: WriteStreamOptions);
|
|
||||||
emit(ev: string, ...args: any[]): boolean;
|
|
||||||
get fd(): number | undefined;
|
|
||||||
get path(): string;
|
|
||||||
[_onerror](er: NodeJS.ErrnoException): void;
|
|
||||||
[_open](): void;
|
|
||||||
[_onopen](er?: null | NodeJS.ErrnoException, fd?: number): void;
|
|
||||||
end(buf: string, enc?: BufferEncoding): this;
|
|
||||||
end(buf?: Buffer, enc?: undefined): this;
|
|
||||||
write(buf: string, enc?: BufferEncoding): boolean;
|
|
||||||
write(buf: Buffer, enc?: undefined): boolean;
|
|
||||||
[_write](buf: Buffer): void;
|
|
||||||
[_onwrite](er?: null | NodeJS.ErrnoException, bw?: number): void;
|
|
||||||
[_flush](): void;
|
|
||||||
[_close](): void;
|
|
||||||
}
|
|
||||||
export declare class WriteStreamSync extends WriteStream {
|
|
||||||
[_open](): void;
|
|
||||||
[_close](): void;
|
|
||||||
[_write](buf: Buffer): void;
|
|
||||||
}
|
|
||||||
export {};
|
|
||||||
//# sourceMappingURL=index.d.ts.map
|
|
||||||
1
node_modules/@isaacs/fs-minipass/dist/esm/index.d.ts.map
generated
vendored
1
node_modules/@isaacs/fs-minipass/dist/esm/index.d.ts.map
generated
vendored
@ -1 +0,0 @@
|
|||||||
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":";;;AAAA,OAAO,EAAE,MAAM,QAAQ,CAAA;AAEvB,OAAO,EAAE,QAAQ,EAAE,MAAM,UAAU,CAAA;AAInC,QAAA,MAAM,UAAU,eAAuB,CAAA;AACvC,QAAA,MAAM,MAAM,eAAmB,CAAA;AAC/B,QAAA,MAAM,MAAM,eAAmB,CAAA;AAC/B,QAAA,MAAM,GAAG,eAAgB,CAAA;AACzB,QAAA,MAAM,SAAS,eAAsB,CAAA;AACrC,QAAA,MAAM,MAAM,eAAmB,CAAA;AAC/B,QAAA,MAAM,MAAM,eAAmB,CAAA;AAC/B,QAAA,MAAM,YAAY,eAAyB,CAAA;AAC3C,QAAA,MAAM,QAAQ,eAAqB,CAAA;AACnC,QAAA,MAAM,KAAK,eAAkB,CAAA;AAC7B,QAAA,MAAM,UAAU,eAAuB,CAAA;AACvC,QAAA,MAAM,QAAQ,eAAqB,CAAA;AACnC,QAAA,MAAM,OAAO,eAAoB,CAAA;AACjC,QAAA,MAAM,OAAO,eAAoB,CAAA;AACjC,QAAA,MAAM,QAAQ,eAAqB,CAAA;AACnC,QAAA,MAAM,KAAK,eAAkB,CAAA;AAC7B,QAAA,MAAM,KAAK,eAAkB,CAAA;AAC7B,QAAA,MAAM,IAAI,eAAiB,CAAA;AAC3B,QAAA,MAAM,MAAM,eAAmB,CAAA;AAC/B,QAAA,MAAM,KAAK,eAAkB,CAAA;AAC7B,QAAA,MAAM,SAAS,eAAsB,CAAA;AACrC,QAAA,MAAM,QAAQ,eAAqB,CAAA;AACnC,QAAA,MAAM,OAAO,eAAoB,CAAA;AACjC,QAAA,MAAM,KAAK,eAAkB,CAAA;AAC7B,QAAA,MAAM,MAAM,eAAmB,CAAA;AAC/B,QAAA,MAAM,QAAQ,eAAqB,CAAA;AACnC,QAAA,MAAM,YAAY,eAAyB,CAAA;AAC3C,QAAA,MAAM,QAAQ,eAAqB,CAAA;AAEnC,MAAM,MAAM,iBAAiB,GAC3B,QAAQ,CAAC,OAAO,CAAC,QAAQ,CAAC,cAAc,CAAC,GAAG;IAC1C,EAAE,CAAC,EAAE,MAAM,CAAA;IACX,QAAQ,CAAC,EAAE,MAAM,CAAA;IACjB,IAAI,CAAC,EAAE,MAAM,CAAA;IACb,SAAS,CAAC,EAAE,OAAO,CAAA;CACpB,CAAA;AAEH,MAAM,MAAM,gBAAgB,GAAG,QAAQ,CAAC,MAAM,CAAC,QAAQ,CAAC,cAAc,CAAC,GAAG;IACxE,IAAI,EAAE,CAAC,EAAE,EAAE,MAAM,CAAC,CAAA;CACnB,CAAA;AAED,qBAAa,UAAW,SAAQ,QAAQ,CACtC,QAAQ,CAAC,cAAc,EACvB,MAAM,EACN,gBAAgB,CACjB;IACC,CAAC,QAAQ,CAAC,EAAE,OAAO,CAAS;IAC5B,CAAC,GAAG,CAAC,CAAC,EAAE,MAAM,CAAC;IACf,CAAC,KAAK,CAAC,EAAE,MAAM,CAAC;IAChB,CAAC,SAAS,CAAC,EAAE,MAAM,CAAC;IACpB,CAAC,QAAQ,CAAC,EAAE,OAAO,CAAS;IAC5B,CAAC,KAAK,CAAC,EAAE,MAAM,CAAC;IAChB,CAAC,OAAO,CAAC,EAAE,MAAM,CAAC;IAClB,CAAC,UAAU,CAAC,EAAE,OAAO,CAAA;gBAET,IAAI,EAAE,MAAM,EAAE,GAAG,EAAE,iBAAiB;IA4BhD,IAAI,EAAE,uBAEL;IAED,IAAI,IAAI,WAEP;IAGD,KAAK;IAKL,GAAG;IAIH,CAAC,KAAK,CAAC;IAIP,CAAC,OAAO,CAAC,CAAC,EAAE,CAAC,EAAE,MAAM,CAAC,cAAc,GAAG,IAAI,EAAE,EAAE,CAAC,EAAE,MAAM;IAUxD,
CAAC,QAAQ,CAAC;IAIV,CAAC,KAAK,CAAC;IAeP,CAAC,OAAO,CAAC,CAAC,EAAE,CAAC,EAAE,MAAM,CAAC,cAAc,GAAG,IAAI,EAAE,EAAE,CAAC,EAAE,MAAM,EAAE,GAAG,CAAC,EAAE,MAAM;IAStE,CAAC,MAAM,CAAC;IAUR,CAAC,QAAQ,CAAC,CAAC,EAAE,EAAE,MAAM,CAAC,cAAc;IAMpC,CAAC,YAAY,CAAC,CAAC,EAAE,EAAE,MAAM,EAAE,GAAG,EAAE,MAAM;IAiBtC,IAAI,CAAC,KAAK,SAAS,MAAM,gBAAgB,EACvC,EAAE,EAAE,KAAK,EACT,GAAG,IAAI,EAAE,gBAAgB,CAAC,KAAK,CAAC,GAC/B,OAAO;CAuBX;AAED,qBAAa,cAAe,SAAQ,UAAU;IAC5C,CAAC,KAAK,CAAC;IAYP,CAAC,KAAK,CAAC;IA2BP,CAAC,MAAM,CAAC;CAQT;AAED,MAAM,MAAM,kBAAkB,GAAG;IAC/B,EAAE,CAAC,EAAE,MAAM,CAAA;IACX,SAAS,CAAC,EAAE,OAAO,CAAA;IACnB,IAAI,CAAC,EAAE,MAAM,CAAA;IACb,iBAAiB,CAAC,EAAE,OAAO,CAAA;IAC3B,KAAK,CAAC,EAAE,MAAM,CAAA;IACd,KAAK,CAAC,EAAE,MAAM,CAAA;CACf,CAAA;AAED,qBAAa,WAAY,SAAQ,EAAE;IACjC,QAAQ,EAAE,KAAK,CAAQ;IACvB,QAAQ,EAAE,OAAO,CAAQ;IACzB,CAAC,QAAQ,CAAC,EAAE,OAAO,CAAS;IAC5B,CAAC,QAAQ,CAAC,EAAE,OAAO,CAAS;IAC5B,CAAC,MAAM,CAAC,EAAE,OAAO,CAAS;IAC1B,CAAC,MAAM,CAAC,EAAE,MAAM,EAAE,CAAM;IACxB,CAAC,UAAU,CAAC,EAAE,OAAO,CAAS;IAC9B,CAAC,KAAK,CAAC,EAAE,MAAM,CAAC;IAChB,CAAC,KAAK,CAAC,EAAE,MAAM,CAAC;IAChB,CAAC,UAAU,CAAC,EAAE,OAAO,CAAC;IACtB,CAAC,GAAG,CAAC,CAAC,EAAE,MAAM,CAAC;IACf,CAAC,YAAY,CAAC,EAAE,OAAO,CAAC;IACxB,CAAC,MAAM,CAAC,EAAE,MAAM,CAAC;IACjB,CAAC,SAAS,CAAC,EAAE,OAAO,CAAS;IAC7B,CAAC,IAAI,CAAC,CAAC,EAAE,MAAM,CAAA;gBAEH,IAAI,EAAE,MAAM,EAAE,GAAG,EAAE,kBAAkB;IAoBjD,IAAI,CAAC,EAAE,EAAE,MAAM,EAAE,GAAG,IAAI,EAAE,GAAG,EAAE;IAU/B,IAAI,EAAE,uBAEL;IAED,IAAI,IAAI,WAEP;IAED,CAAC,QAAQ,CAAC,CAAC,EAAE,EAAE,MAAM,CAAC,cAAc;IAMpC,CAAC,KAAK,CAAC;IAMP,CAAC,OAAO,CAAC,CAAC,EAAE,CAAC,EAAE,IAAI,GAAG,MAAM,CAAC,cAAc,EAAE,EAAE,CAAC,EAAE,MAAM;IAoBxD,GAAG,CAAC,GAAG,EAAE,MAAM,EAAE,GAAG,CAAC,EAAE,cAAc,GAAG,IAAI;IAC5C,GAAG,CAAC,GAAG,CAAC,EAAE,MAAM,EAAE,GAAG,CAAC,EAAE,SAAS,GAAG,IAAI;IAoBxC,KAAK,CAAC,GAAG,EAAE,MAAM,EAAE,GAAG,CAAC,EAAE,cAAc,GAAG,OAAO;IACjD,KAAK,CAAC,GAAG,EAAE,MAAM,EAAE,GAAG,CAAC,EAAE,SAAS,GAAG,OAAO;IAsB5C,CAAC,MAAM,CAAC,CAAC,GAAG,EAAE,MAAM;IAWpB,CAAC,QAAQ,CAAC,CAAC,EAAE,CAAC,EAAE,IAAI,GAAG,MAAM,CAAC,cAAc,EAAE,EAAE,CAAC,EAAE,MAAM;IAwBzD,CAAC,
MAAM,CAAC;IAgBR,CAAC,MAAM,CAAC;CAST;AAED,qBAAa,eAAgB,SAAQ,WAAW;IAC9C,CAAC,KAAK,CAAC,IAAI,IAAI;IAsBf,CAAC,MAAM,CAAC;IASR,CAAC,MAAM,CAAC,CAAC,GAAG,EAAE,MAAM;CAmBrB"}
|
|
||||||
420
node_modules/@isaacs/fs-minipass/dist/esm/index.js
generated
vendored
420
node_modules/@isaacs/fs-minipass/dist/esm/index.js
generated
vendored
@ -1,420 +0,0 @@
|
|||||||
import EE from 'events';
|
|
||||||
import fs from 'fs';
|
|
||||||
import { Minipass } from 'minipass';
|
|
||||||
const writev = fs.writev;
|
|
||||||
const _autoClose = Symbol('_autoClose');
|
|
||||||
const _close = Symbol('_close');
|
|
||||||
const _ended = Symbol('_ended');
|
|
||||||
const _fd = Symbol('_fd');
|
|
||||||
const _finished = Symbol('_finished');
|
|
||||||
const _flags = Symbol('_flags');
|
|
||||||
const _flush = Symbol('_flush');
|
|
||||||
const _handleChunk = Symbol('_handleChunk');
|
|
||||||
const _makeBuf = Symbol('_makeBuf');
|
|
||||||
const _mode = Symbol('_mode');
|
|
||||||
const _needDrain = Symbol('_needDrain');
|
|
||||||
const _onerror = Symbol('_onerror');
|
|
||||||
const _onopen = Symbol('_onopen');
|
|
||||||
const _onread = Symbol('_onread');
|
|
||||||
const _onwrite = Symbol('_onwrite');
|
|
||||||
const _open = Symbol('_open');
|
|
||||||
const _path = Symbol('_path');
|
|
||||||
const _pos = Symbol('_pos');
|
|
||||||
const _queue = Symbol('_queue');
|
|
||||||
const _read = Symbol('_read');
|
|
||||||
const _readSize = Symbol('_readSize');
|
|
||||||
const _reading = Symbol('_reading');
|
|
||||||
const _remain = Symbol('_remain');
|
|
||||||
const _size = Symbol('_size');
|
|
||||||
const _write = Symbol('_write');
|
|
||||||
const _writing = Symbol('_writing');
|
|
||||||
const _defaultFlag = Symbol('_defaultFlag');
|
|
||||||
const _errored = Symbol('_errored');
|
|
||||||
export class ReadStream extends Minipass {
|
|
||||||
[_errored] = false;
|
|
||||||
[_fd];
|
|
||||||
[_path];
|
|
||||||
[_readSize];
|
|
||||||
[_reading] = false;
|
|
||||||
[_size];
|
|
||||||
[_remain];
|
|
||||||
[_autoClose];
|
|
||||||
constructor(path, opt) {
|
|
||||||
opt = opt || {};
|
|
||||||
super(opt);
|
|
||||||
this.readable = true;
|
|
||||||
this.writable = false;
|
|
||||||
if (typeof path !== 'string') {
|
|
||||||
throw new TypeError('path must be a string');
|
|
||||||
}
|
|
||||||
this[_errored] = false;
|
|
||||||
this[_fd] = typeof opt.fd === 'number' ? opt.fd : undefined;
|
|
||||||
this[_path] = path;
|
|
||||||
this[_readSize] = opt.readSize || 16 * 1024 * 1024;
|
|
||||||
this[_reading] = false;
|
|
||||||
this[_size] = typeof opt.size === 'number' ? opt.size : Infinity;
|
|
||||||
this[_remain] = this[_size];
|
|
||||||
this[_autoClose] =
|
|
||||||
typeof opt.autoClose === 'boolean' ? opt.autoClose : true;
|
|
||||||
if (typeof this[_fd] === 'number') {
|
|
||||||
this[_read]();
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
this[_open]();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
get fd() {
|
|
||||||
return this[_fd];
|
|
||||||
}
|
|
||||||
get path() {
|
|
||||||
return this[_path];
|
|
||||||
}
|
|
||||||
//@ts-ignore
|
|
||||||
write() {
|
|
||||||
throw new TypeError('this is a readable stream');
|
|
||||||
}
|
|
||||||
//@ts-ignore
|
|
||||||
end() {
|
|
||||||
throw new TypeError('this is a readable stream');
|
|
||||||
}
|
|
||||||
[_open]() {
|
|
||||||
fs.open(this[_path], 'r', (er, fd) => this[_onopen](er, fd));
|
|
||||||
}
|
|
||||||
[_onopen](er, fd) {
|
|
||||||
if (er) {
|
|
||||||
this[_onerror](er);
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
this[_fd] = fd;
|
|
||||||
this.emit('open', fd);
|
|
||||||
this[_read]();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
[_makeBuf]() {
|
|
||||||
return Buffer.allocUnsafe(Math.min(this[_readSize], this[_remain]));
|
|
||||||
}
|
|
||||||
[_read]() {
|
|
||||||
if (!this[_reading]) {
|
|
||||||
this[_reading] = true;
|
|
||||||
const buf = this[_makeBuf]();
|
|
||||||
/* c8 ignore start */
|
|
||||||
if (buf.length === 0) {
|
|
||||||
return process.nextTick(() => this[_onread](null, 0, buf));
|
|
||||||
}
|
|
||||||
/* c8 ignore stop */
|
|
||||||
fs.read(this[_fd], buf, 0, buf.length, null, (er, br, b) => this[_onread](er, br, b));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
[_onread](er, br, buf) {
|
|
||||||
this[_reading] = false;
|
|
||||||
if (er) {
|
|
||||||
this[_onerror](er);
|
|
||||||
}
|
|
||||||
else if (this[_handleChunk](br, buf)) {
|
|
||||||
this[_read]();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
[_close]() {
|
|
||||||
if (this[_autoClose] && typeof this[_fd] === 'number') {
|
|
||||||
const fd = this[_fd];
|
|
||||||
this[_fd] = undefined;
|
|
||||||
fs.close(fd, er => er ? this.emit('error', er) : this.emit('close'));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
[_onerror](er) {
|
|
||||||
this[_reading] = true;
|
|
||||||
this[_close]();
|
|
||||||
this.emit('error', er);
|
|
||||||
}
|
|
||||||
[_handleChunk](br, buf) {
|
|
||||||
let ret = false;
|
|
||||||
// no effect if infinite
|
|
||||||
this[_remain] -= br;
|
|
||||||
if (br > 0) {
|
|
||||||
ret = super.write(br < buf.length ? buf.subarray(0, br) : buf);
|
|
||||||
}
|
|
||||||
if (br === 0 || this[_remain] <= 0) {
|
|
||||||
ret = false;
|
|
||||||
this[_close]();
|
|
||||||
super.end();
|
|
||||||
}
|
|
||||||
return ret;
|
|
||||||
}
|
|
||||||
emit(ev, ...args) {
|
|
||||||
switch (ev) {
|
|
||||||
case 'prefinish':
|
|
||||||
case 'finish':
|
|
||||||
return false;
|
|
||||||
case 'drain':
|
|
||||||
if (typeof this[_fd] === 'number') {
|
|
||||||
this[_read]();
|
|
||||||
}
|
|
||||||
return false;
|
|
||||||
case 'error':
|
|
||||||
if (this[_errored]) {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
this[_errored] = true;
|
|
||||||
return super.emit(ev, ...args);
|
|
||||||
default:
|
|
||||||
return super.emit(ev, ...args);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
export class ReadStreamSync extends ReadStream {
|
|
||||||
[_open]() {
|
|
||||||
let threw = true;
|
|
||||||
try {
|
|
||||||
this[_onopen](null, fs.openSync(this[_path], 'r'));
|
|
||||||
threw = false;
|
|
||||||
}
|
|
||||||
finally {
|
|
||||||
if (threw) {
|
|
||||||
this[_close]();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
[_read]() {
|
|
||||||
let threw = true;
|
|
||||||
try {
|
|
||||||
if (!this[_reading]) {
|
|
||||||
this[_reading] = true;
|
|
||||||
do {
|
|
||||||
const buf = this[_makeBuf]();
|
|
||||||
/* c8 ignore start */
|
|
||||||
const br = buf.length === 0
|
|
||||||
? 0
|
|
||||||
: fs.readSync(this[_fd], buf, 0, buf.length, null);
|
|
||||||
/* c8 ignore stop */
|
|
||||||
if (!this[_handleChunk](br, buf)) {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
} while (true);
|
|
||||||
this[_reading] = false;
|
|
||||||
}
|
|
||||||
threw = false;
|
|
||||||
}
|
|
||||||
finally {
|
|
||||||
if (threw) {
|
|
||||||
this[_close]();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
[_close]() {
|
|
||||||
if (this[_autoClose] && typeof this[_fd] === 'number') {
|
|
||||||
const fd = this[_fd];
|
|
||||||
this[_fd] = undefined;
|
|
||||||
fs.closeSync(fd);
|
|
||||||
this.emit('close');
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
export class WriteStream extends EE {
|
|
||||||
readable = false;
|
|
||||||
writable = true;
|
|
||||||
[_errored] = false;
|
|
||||||
[_writing] = false;
|
|
||||||
[_ended] = false;
|
|
||||||
[_queue] = [];
|
|
||||||
[_needDrain] = false;
|
|
||||||
[_path];
|
|
||||||
[_mode];
|
|
||||||
[_autoClose];
|
|
||||||
[_fd];
|
|
||||||
[_defaultFlag];
|
|
||||||
[_flags];
|
|
||||||
[_finished] = false;
|
|
||||||
[_pos];
|
|
||||||
constructor(path, opt) {
|
|
||||||
opt = opt || {};
|
|
||||||
super(opt);
|
|
||||||
this[_path] = path;
|
|
||||||
this[_fd] = typeof opt.fd === 'number' ? opt.fd : undefined;
|
|
||||||
this[_mode] = opt.mode === undefined ? 0o666 : opt.mode;
|
|
||||||
this[_pos] = typeof opt.start === 'number' ? opt.start : undefined;
|
|
||||||
this[_autoClose] =
|
|
||||||
typeof opt.autoClose === 'boolean' ? opt.autoClose : true;
|
|
||||||
// truncating makes no sense when writing into the middle
|
|
||||||
const defaultFlag = this[_pos] !== undefined ? 'r+' : 'w';
|
|
||||||
this[_defaultFlag] = opt.flags === undefined;
|
|
||||||
this[_flags] = opt.flags === undefined ? defaultFlag : opt.flags;
|
|
||||||
if (this[_fd] === undefined) {
|
|
||||||
this[_open]();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
emit(ev, ...args) {
|
|
||||||
if (ev === 'error') {
|
|
||||||
if (this[_errored]) {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
this[_errored] = true;
|
|
||||||
}
|
|
||||||
return super.emit(ev, ...args);
|
|
||||||
}
|
|
||||||
get fd() {
|
|
||||||
return this[_fd];
|
|
||||||
}
|
|
||||||
get path() {
|
|
||||||
return this[_path];
|
|
||||||
}
|
|
||||||
[_onerror](er) {
|
|
||||||
this[_close]();
|
|
||||||
this[_writing] = true;
|
|
||||||
this.emit('error', er);
|
|
||||||
}
|
|
||||||
[_open]() {
|
|
||||||
fs.open(this[_path], this[_flags], this[_mode], (er, fd) => this[_onopen](er, fd));
|
|
||||||
}
|
|
||||||
[_onopen](er, fd) {
|
|
||||||
if (this[_defaultFlag] &&
|
|
||||||
this[_flags] === 'r+' &&
|
|
||||||
er &&
|
|
||||||
er.code === 'ENOENT') {
|
|
||||||
this[_flags] = 'w';
|
|
||||||
this[_open]();
|
|
||||||
}
|
|
||||||
else if (er) {
|
|
||||||
this[_onerror](er);
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
this[_fd] = fd;
|
|
||||||
this.emit('open', fd);
|
|
||||||
if (!this[_writing]) {
|
|
||||||
this[_flush]();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
end(buf, enc) {
|
|
||||||
if (buf) {
|
|
||||||
//@ts-ignore
|
|
||||||
this.write(buf, enc);
|
|
||||||
}
|
|
||||||
this[_ended] = true;
|
|
||||||
// synthetic after-write logic, where drain/finish live
|
|
||||||
if (!this[_writing] &&
|
|
||||||
!this[_queue].length &&
|
|
||||||
typeof this[_fd] === 'number') {
|
|
||||||
this[_onwrite](null, 0);
|
|
||||||
}
|
|
||||||
return this;
|
|
||||||
}
|
|
||||||
write(buf, enc) {
|
|
||||||
if (typeof buf === 'string') {
|
|
||||||
buf = Buffer.from(buf, enc);
|
|
||||||
}
|
|
||||||
if (this[_ended]) {
|
|
||||||
this.emit('error', new Error('write() after end()'));
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
if (this[_fd] === undefined || this[_writing] || this[_queue].length) {
|
|
||||||
this[_queue].push(buf);
|
|
||||||
this[_needDrain] = true;
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
this[_writing] = true;
|
|
||||||
this[_write](buf);
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
[_write](buf) {
|
|
||||||
fs.write(this[_fd], buf, 0, buf.length, this[_pos], (er, bw) => this[_onwrite](er, bw));
|
|
||||||
}
|
|
||||||
[_onwrite](er, bw) {
|
|
||||||
if (er) {
|
|
||||||
this[_onerror](er);
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
if (this[_pos] !== undefined && typeof bw === 'number') {
|
|
||||||
this[_pos] += bw;
|
|
||||||
}
|
|
||||||
if (this[_queue].length) {
|
|
||||||
this[_flush]();
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
this[_writing] = false;
|
|
||||||
if (this[_ended] && !this[_finished]) {
|
|
||||||
this[_finished] = true;
|
|
||||||
this[_close]();
|
|
||||||
this.emit('finish');
|
|
||||||
}
|
|
||||||
else if (this[_needDrain]) {
|
|
||||||
this[_needDrain] = false;
|
|
||||||
this.emit('drain');
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
[_flush]() {
|
|
||||||
if (this[_queue].length === 0) {
|
|
||||||
if (this[_ended]) {
|
|
||||||
this[_onwrite](null, 0);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
else if (this[_queue].length === 1) {
|
|
||||||
this[_write](this[_queue].pop());
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
const iovec = this[_queue];
|
|
||||||
this[_queue] = [];
|
|
||||||
writev(this[_fd], iovec, this[_pos], (er, bw) => this[_onwrite](er, bw));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
[_close]() {
|
|
||||||
if (this[_autoClose] && typeof this[_fd] === 'number') {
|
|
||||||
const fd = this[_fd];
|
|
||||||
this[_fd] = undefined;
|
|
||||||
fs.close(fd, er => er ? this.emit('error', er) : this.emit('close'));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
export class WriteStreamSync extends WriteStream {
|
|
||||||
[_open]() {
|
|
||||||
let fd;
|
|
||||||
// only wrap in a try{} block if we know we'll retry, to avoid
|
|
||||||
// the rethrow obscuring the error's source frame in most cases.
|
|
||||||
if (this[_defaultFlag] && this[_flags] === 'r+') {
|
|
||||||
try {
|
|
||||||
fd = fs.openSync(this[_path], this[_flags], this[_mode]);
|
|
||||||
}
|
|
||||||
catch (er) {
|
|
||||||
if (er?.code === 'ENOENT') {
|
|
||||||
this[_flags] = 'w';
|
|
||||||
return this[_open]();
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
throw er;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
fd = fs.openSync(this[_path], this[_flags], this[_mode]);
|
|
||||||
}
|
|
||||||
this[_onopen](null, fd);
|
|
||||||
}
|
|
||||||
[_close]() {
|
|
||||||
if (this[_autoClose] && typeof this[_fd] === 'number') {
|
|
||||||
const fd = this[_fd];
|
|
||||||
this[_fd] = undefined;
|
|
||||||
fs.closeSync(fd);
|
|
||||||
this.emit('close');
|
|
||||||
}
|
|
||||||
}
|
|
||||||
[_write](buf) {
|
|
||||||
// throw the original, but try to close if it fails
|
|
||||||
let threw = true;
|
|
||||||
try {
|
|
||||||
this[_onwrite](null, fs.writeSync(this[_fd], buf, 0, buf.length, this[_pos]));
|
|
||||||
threw = false;
|
|
||||||
}
|
|
||||||
finally {
|
|
||||||
if (threw) {
|
|
||||||
try {
|
|
||||||
this[_close]();
|
|
||||||
}
|
|
||||||
catch {
|
|
||||||
// ok error
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
//# sourceMappingURL=index.js.map
|
|
||||||
1
node_modules/@isaacs/fs-minipass/dist/esm/index.js.map
generated
vendored
1
node_modules/@isaacs/fs-minipass/dist/esm/index.js.map
generated
vendored
File diff suppressed because one or more lines are too long
3
node_modules/@isaacs/fs-minipass/dist/esm/package.json
generated
vendored
3
node_modules/@isaacs/fs-minipass/dist/esm/package.json
generated
vendored
@ -1,3 +0,0 @@
|
|||||||
{
|
|
||||||
"type": "module"
|
|
||||||
}
|
|
||||||
72
node_modules/@isaacs/fs-minipass/package.json
generated
vendored
72
node_modules/@isaacs/fs-minipass/package.json
generated
vendored
@ -1,72 +0,0 @@
|
|||||||
{
|
|
||||||
"name": "@isaacs/fs-minipass",
|
|
||||||
"version": "4.0.1",
|
|
||||||
"main": "./dist/commonjs/index.js",
|
|
||||||
"scripts": {
|
|
||||||
"prepare": "tshy",
|
|
||||||
"pretest": "npm run prepare",
|
|
||||||
"test": "tap",
|
|
||||||
"preversion": "npm test",
|
|
||||||
"postversion": "npm publish",
|
|
||||||
"prepublishOnly": "git push origin --follow-tags",
|
|
||||||
"format": "prettier --write . --loglevel warn",
|
|
||||||
"typedoc": "typedoc --tsconfig .tshy/esm.json ./src/*.ts"
|
|
||||||
},
|
|
||||||
"keywords": [],
|
|
||||||
"author": "Isaac Z. Schlueter",
|
|
||||||
"license": "ISC",
|
|
||||||
"repository": {
|
|
||||||
"type": "git",
|
|
||||||
"url": "https://github.com/npm/fs-minipass.git"
|
|
||||||
},
|
|
||||||
"description": "fs read and write streams based on minipass",
|
|
||||||
"dependencies": {
|
|
||||||
"minipass": "^7.0.4"
|
|
||||||
},
|
|
||||||
"devDependencies": {
|
|
||||||
"@types/node": "^20.11.30",
|
|
||||||
"mutate-fs": "^2.1.1",
|
|
||||||
"prettier": "^3.2.5",
|
|
||||||
"tap": "^18.7.1",
|
|
||||||
"tshy": "^1.12.0",
|
|
||||||
"typedoc": "^0.25.12"
|
|
||||||
},
|
|
||||||
"files": [
|
|
||||||
"dist"
|
|
||||||
],
|
|
||||||
"engines": {
|
|
||||||
"node": ">=18.0.0"
|
|
||||||
},
|
|
||||||
"tshy": {
|
|
||||||
"exports": {
|
|
||||||
"./package.json": "./package.json",
|
|
||||||
".": "./src/index.ts"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"exports": {
|
|
||||||
"./package.json": "./package.json",
|
|
||||||
".": {
|
|
||||||
"import": {
|
|
||||||
"types": "./dist/esm/index.d.ts",
|
|
||||||
"default": "./dist/esm/index.js"
|
|
||||||
},
|
|
||||||
"require": {
|
|
||||||
"types": "./dist/commonjs/index.d.ts",
|
|
||||||
"default": "./dist/commonjs/index.js"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"types": "./dist/commonjs/index.d.ts",
|
|
||||||
"type": "module",
|
|
||||||
"prettier": {
|
|
||||||
"semi": false,
|
|
||||||
"printWidth": 75,
|
|
||||||
"tabWidth": 2,
|
|
||||||
"useTabs": false,
|
|
||||||
"singleQuote": true,
|
|
||||||
"jsxSingleQuote": false,
|
|
||||||
"bracketSameLine": true,
|
|
||||||
"arrowParens": "avoid",
|
|
||||||
"endOfLine": "lf"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
19
node_modules/@jridgewell/gen-mapping/LICENSE
generated
vendored
19
node_modules/@jridgewell/gen-mapping/LICENSE
generated
vendored
@ -1,19 +0,0 @@
|
|||||||
Copyright 2022 Justin Ridgewell <jridgewell@google.com>
|
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
||||||
of this software and associated documentation files (the "Software"), to deal
|
|
||||||
in the Software without restriction, including without limitation the rights
|
|
||||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
||||||
copies of the Software, and to permit persons to whom the Software is
|
|
||||||
furnished to do so, subject to the following conditions:
|
|
||||||
|
|
||||||
The above copyright notice and this permission notice shall be included in
|
|
||||||
all copies or substantial portions of the Software.
|
|
||||||
|
|
||||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
||||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
||||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
||||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
||||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
||||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
||||||
SOFTWARE.
|
|
||||||
227
node_modules/@jridgewell/gen-mapping/README.md
generated
vendored
227
node_modules/@jridgewell/gen-mapping/README.md
generated
vendored
@ -1,227 +0,0 @@
|
|||||||
# @jridgewell/gen-mapping
|
|
||||||
|
|
||||||
> Generate source maps
|
|
||||||
|
|
||||||
`gen-mapping` allows you to generate a source map during transpilation or minification.
|
|
||||||
With a source map, you're able to trace the original location in the source file, either in Chrome's
|
|
||||||
DevTools or using a library like [`@jridgewell/trace-mapping`][trace-mapping].
|
|
||||||
|
|
||||||
You may already be familiar with the [`source-map`][source-map] package's `SourceMapGenerator`. This
|
|
||||||
provides the same `addMapping` and `setSourceContent` API.
|
|
||||||
|
|
||||||
## Installation
|
|
||||||
|
|
||||||
```sh
|
|
||||||
npm install @jridgewell/gen-mapping
|
|
||||||
```
|
|
||||||
|
|
||||||
## Usage
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
import { GenMapping, addMapping, setSourceContent, toEncodedMap, toDecodedMap } from '@jridgewell/gen-mapping';
|
|
||||||
|
|
||||||
const map = new GenMapping({
|
|
||||||
file: 'output.js',
|
|
||||||
sourceRoot: 'https://example.com/',
|
|
||||||
});
|
|
||||||
|
|
||||||
setSourceContent(map, 'input.js', `function foo() {}`);
|
|
||||||
|
|
||||||
addMapping(map, {
|
|
||||||
// Lines start at line 1, columns at column 0.
|
|
||||||
generated: { line: 1, column: 0 },
|
|
||||||
source: 'input.js',
|
|
||||||
original: { line: 1, column: 0 },
|
|
||||||
});
|
|
||||||
|
|
||||||
addMapping(map, {
|
|
||||||
generated: { line: 1, column: 9 },
|
|
||||||
source: 'input.js',
|
|
||||||
original: { line: 1, column: 9 },
|
|
||||||
name: 'foo',
|
|
||||||
});
|
|
||||||
|
|
||||||
assert.deepEqual(toDecodedMap(map), {
|
|
||||||
version: 3,
|
|
||||||
file: 'output.js',
|
|
||||||
names: ['foo'],
|
|
||||||
sourceRoot: 'https://example.com/',
|
|
||||||
sources: ['input.js'],
|
|
||||||
sourcesContent: ['function foo() {}'],
|
|
||||||
mappings: [
|
|
||||||
[ [0, 0, 0, 0], [9, 0, 0, 9, 0] ]
|
|
||||||
],
|
|
||||||
});
|
|
||||||
|
|
||||||
assert.deepEqual(toEncodedMap(map), {
|
|
||||||
version: 3,
|
|
||||||
file: 'output.js',
|
|
||||||
names: ['foo'],
|
|
||||||
sourceRoot: 'https://example.com/',
|
|
||||||
sources: ['input.js'],
|
|
||||||
sourcesContent: ['function foo() {}'],
|
|
||||||
mappings: 'AAAA,SAASA',
|
|
||||||
});
|
|
||||||
```
|
|
||||||
|
|
||||||
### Smaller Sourcemaps
|
|
||||||
|
|
||||||
Not everything needs to be added to a sourcemap, and needless markings can cause signficantly
|
|
||||||
larger file sizes. `gen-mapping` exposes `maybeAddSegment`/`maybeAddMapping` APIs that will
|
|
||||||
intelligently determine if this marking adds useful information. If not, the marking will be
|
|
||||||
skipped.
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
import { maybeAddMapping } from '@jridgewell/gen-mapping';
|
|
||||||
|
|
||||||
const map = new GenMapping();
|
|
||||||
|
|
||||||
// Adding a sourceless marking at the beginning of a line isn't useful.
|
|
||||||
maybeAddMapping(map, {
|
|
||||||
generated: { line: 1, column: 0 },
|
|
||||||
});
|
|
||||||
|
|
||||||
// Adding a new source marking is useful.
|
|
||||||
maybeAddMapping(map, {
|
|
||||||
generated: { line: 1, column: 0 },
|
|
||||||
source: 'input.js',
|
|
||||||
original: { line: 1, column: 0 },
|
|
||||||
});
|
|
||||||
|
|
||||||
// But adding another marking pointing to the exact same original location isn't, even if the
|
|
||||||
// generated column changed.
|
|
||||||
maybeAddMapping(map, {
|
|
||||||
generated: { line: 1, column: 9 },
|
|
||||||
source: 'input.js',
|
|
||||||
original: { line: 1, column: 0 },
|
|
||||||
});
|
|
||||||
|
|
||||||
assert.deepEqual(toEncodedMap(map), {
|
|
||||||
version: 3,
|
|
||||||
names: [],
|
|
||||||
sources: ['input.js'],
|
|
||||||
sourcesContent: [null],
|
|
||||||
mappings: 'AAAA',
|
|
||||||
});
|
|
||||||
```
|
|
||||||
|
|
||||||
## Benchmarks
|
|
||||||
|
|
||||||
```
|
|
||||||
node v18.0.0
|
|
||||||
|
|
||||||
amp.js.map
|
|
||||||
Memory Usage:
|
|
||||||
gen-mapping: addSegment 5852872 bytes
|
|
||||||
gen-mapping: addMapping 7716042 bytes
|
|
||||||
source-map-js 6143250 bytes
|
|
||||||
source-map-0.6.1 6124102 bytes
|
|
||||||
source-map-0.8.0 6121173 bytes
|
|
||||||
Smallest memory usage is gen-mapping: addSegment
|
|
||||||
|
|
||||||
Adding speed:
|
|
||||||
gen-mapping: addSegment x 441 ops/sec ±2.07% (90 runs sampled)
|
|
||||||
gen-mapping: addMapping x 350 ops/sec ±2.40% (86 runs sampled)
|
|
||||||
source-map-js: addMapping x 169 ops/sec ±2.42% (80 runs sampled)
|
|
||||||
source-map-0.6.1: addMapping x 167 ops/sec ±2.56% (80 runs sampled)
|
|
||||||
source-map-0.8.0: addMapping x 168 ops/sec ±2.52% (80 runs sampled)
|
|
||||||
Fastest is gen-mapping: addSegment
|
|
||||||
|
|
||||||
Generate speed:
|
|
||||||
gen-mapping: decoded output x 150,824,370 ops/sec ±0.07% (102 runs sampled)
|
|
||||||
gen-mapping: encoded output x 663 ops/sec ±0.22% (98 runs sampled)
|
|
||||||
source-map-js: encoded output x 197 ops/sec ±0.45% (84 runs sampled)
|
|
||||||
source-map-0.6.1: encoded output x 198 ops/sec ±0.33% (85 runs sampled)
|
|
||||||
source-map-0.8.0: encoded output x 197 ops/sec ±0.06% (93 runs sampled)
|
|
||||||
Fastest is gen-mapping: decoded output
|
|
||||||
|
|
||||||
|
|
||||||
***
|
|
||||||
|
|
||||||
|
|
||||||
babel.min.js.map
|
|
||||||
Memory Usage:
|
|
||||||
gen-mapping: addSegment 37578063 bytes
|
|
||||||
gen-mapping: addMapping 37212897 bytes
|
|
||||||
source-map-js 47638527 bytes
|
|
||||||
source-map-0.6.1 47690503 bytes
|
|
||||||
source-map-0.8.0 47470188 bytes
|
|
||||||
Smallest memory usage is gen-mapping: addMapping
|
|
||||||
|
|
||||||
Adding speed:
|
|
||||||
gen-mapping: addSegment x 31.05 ops/sec ±8.31% (43 runs sampled)
|
|
||||||
gen-mapping: addMapping x 29.83 ops/sec ±7.36% (51 runs sampled)
|
|
||||||
source-map-js: addMapping x 20.73 ops/sec ±6.22% (38 runs sampled)
|
|
||||||
source-map-0.6.1: addMapping x 20.03 ops/sec ±10.51% (38 runs sampled)
|
|
||||||
source-map-0.8.0: addMapping x 19.30 ops/sec ±8.27% (37 runs sampled)
|
|
||||||
Fastest is gen-mapping: addSegment
|
|
||||||
|
|
||||||
Generate speed:
|
|
||||||
gen-mapping: decoded output x 381,379,234 ops/sec ±0.29% (96 runs sampled)
|
|
||||||
gen-mapping: encoded output x 95.15 ops/sec ±2.98% (72 runs sampled)
|
|
||||||
source-map-js: encoded output x 15.20 ops/sec ±7.41% (33 runs sampled)
|
|
||||||
source-map-0.6.1: encoded output x 16.36 ops/sec ±10.46% (31 runs sampled)
|
|
||||||
source-map-0.8.0: encoded output x 16.06 ops/sec ±6.45% (31 runs sampled)
|
|
||||||
Fastest is gen-mapping: decoded output
|
|
||||||
|
|
||||||
|
|
||||||
***
|
|
||||||
|
|
||||||
|
|
||||||
preact.js.map
|
|
||||||
Memory Usage:
|
|
||||||
gen-mapping: addSegment 416247 bytes
|
|
||||||
gen-mapping: addMapping 419824 bytes
|
|
||||||
source-map-js 1024619 bytes
|
|
||||||
source-map-0.6.1 1146004 bytes
|
|
||||||
source-map-0.8.0 1113250 bytes
|
|
||||||
Smallest memory usage is gen-mapping: addSegment
|
|
||||||
|
|
||||||
Adding speed:
|
|
||||||
gen-mapping: addSegment x 13,755 ops/sec ±0.15% (98 runs sampled)
|
|
||||||
gen-mapping: addMapping x 13,013 ops/sec ±0.11% (101 runs sampled)
|
|
||||||
source-map-js: addMapping x 4,564 ops/sec ±0.21% (98 runs sampled)
|
|
||||||
source-map-0.6.1: addMapping x 4,562 ops/sec ±0.11% (99 runs sampled)
|
|
||||||
source-map-0.8.0: addMapping x 4,593 ops/sec ±0.11% (100 runs sampled)
|
|
||||||
Fastest is gen-mapping: addSegment
|
|
||||||
|
|
||||||
Generate speed:
|
|
||||||
gen-mapping: decoded output x 379,864,020 ops/sec ±0.23% (93 runs sampled)
|
|
||||||
gen-mapping: encoded output x 14,368 ops/sec ±4.07% (82 runs sampled)
|
|
||||||
source-map-js: encoded output x 5,261 ops/sec ±0.21% (99 runs sampled)
|
|
||||||
source-map-0.6.1: encoded output x 5,124 ops/sec ±0.58% (99 runs sampled)
|
|
||||||
source-map-0.8.0: encoded output x 5,434 ops/sec ±0.33% (96 runs sampled)
|
|
||||||
Fastest is gen-mapping: decoded output
|
|
||||||
|
|
||||||
|
|
||||||
***
|
|
||||||
|
|
||||||
|
|
||||||
react.js.map
|
|
||||||
Memory Usage:
|
|
||||||
gen-mapping: addSegment 975096 bytes
|
|
||||||
gen-mapping: addMapping 1102981 bytes
|
|
||||||
source-map-js 2918836 bytes
|
|
||||||
source-map-0.6.1 2885435 bytes
|
|
||||||
source-map-0.8.0 2874336 bytes
|
|
||||||
Smallest memory usage is gen-mapping: addSegment
|
|
||||||
|
|
||||||
Adding speed:
|
|
||||||
gen-mapping: addSegment x 4,772 ops/sec ±0.15% (100 runs sampled)
|
|
||||||
gen-mapping: addMapping x 4,456 ops/sec ±0.13% (97 runs sampled)
|
|
||||||
source-map-js: addMapping x 1,618 ops/sec ±0.24% (97 runs sampled)
|
|
||||||
source-map-0.6.1: addMapping x 1,622 ops/sec ±0.12% (99 runs sampled)
|
|
||||||
source-map-0.8.0: addMapping x 1,631 ops/sec ±0.12% (100 runs sampled)
|
|
||||||
Fastest is gen-mapping: addSegment
|
|
||||||
|
|
||||||
Generate speed:
|
|
||||||
gen-mapping: decoded output x 379,107,695 ops/sec ±0.07% (99 runs sampled)
|
|
||||||
gen-mapping: encoded output x 5,421 ops/sec ±1.60% (89 runs sampled)
|
|
||||||
source-map-js: encoded output x 2,113 ops/sec ±1.81% (98 runs sampled)
|
|
||||||
source-map-0.6.1: encoded output x 2,126 ops/sec ±0.10% (100 runs sampled)
|
|
||||||
source-map-0.8.0: encoded output x 2,176 ops/sec ±0.39% (98 runs sampled)
|
|
||||||
Fastest is gen-mapping: decoded output
|
|
||||||
```
|
|
||||||
|
|
||||||
[source-map]: https://www.npmjs.com/package/source-map
|
|
||||||
[trace-mapping]: https://github.com/jridgewell/trace-mapping
|
|
||||||
230
node_modules/@jridgewell/gen-mapping/dist/gen-mapping.mjs
generated
vendored
230
node_modules/@jridgewell/gen-mapping/dist/gen-mapping.mjs
generated
vendored
@ -1,230 +0,0 @@
|
|||||||
import { SetArray, put, remove } from '@jridgewell/set-array';
|
|
||||||
import { encode } from '@jridgewell/sourcemap-codec';
|
|
||||||
import { TraceMap, decodedMappings } from '@jridgewell/trace-mapping';
|
|
||||||
|
|
||||||
const COLUMN = 0;
|
|
||||||
const SOURCES_INDEX = 1;
|
|
||||||
const SOURCE_LINE = 2;
|
|
||||||
const SOURCE_COLUMN = 3;
|
|
||||||
const NAMES_INDEX = 4;
|
|
||||||
|
|
||||||
const NO_NAME = -1;
|
|
||||||
/**
|
|
||||||
* Provides the state to generate a sourcemap.
|
|
||||||
*/
|
|
||||||
class GenMapping {
|
|
||||||
constructor({ file, sourceRoot } = {}) {
|
|
||||||
this._names = new SetArray();
|
|
||||||
this._sources = new SetArray();
|
|
||||||
this._sourcesContent = [];
|
|
||||||
this._mappings = [];
|
|
||||||
this.file = file;
|
|
||||||
this.sourceRoot = sourceRoot;
|
|
||||||
this._ignoreList = new SetArray();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Typescript doesn't allow friend access to private fields, so this just casts the map into a type
|
|
||||||
* with public access modifiers.
|
|
||||||
*/
|
|
||||||
function cast(map) {
|
|
||||||
return map;
|
|
||||||
}
|
|
||||||
function addSegment(map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) {
|
|
||||||
return addSegmentInternal(false, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content);
|
|
||||||
}
|
|
||||||
function addMapping(map, mapping) {
|
|
||||||
return addMappingInternal(false, map, mapping);
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Same as `addSegment`, but will only add the segment if it generates useful information in the
|
|
||||||
* resulting map. This only works correctly if segments are added **in order**, meaning you should
|
|
||||||
* not add a segment with a lower generated line/column than one that came before.
|
|
||||||
*/
|
|
||||||
const maybeAddSegment = (map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) => {
|
|
||||||
return addSegmentInternal(true, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content);
|
|
||||||
};
|
|
||||||
/**
|
|
||||||
* Same as `addMapping`, but will only add the mapping if it generates useful information in the
|
|
||||||
* resulting map. This only works correctly if mappings are added **in order**, meaning you should
|
|
||||||
* not add a mapping with a lower generated line/column than one that came before.
|
|
||||||
*/
|
|
||||||
const maybeAddMapping = (map, mapping) => {
|
|
||||||
return addMappingInternal(true, map, mapping);
|
|
||||||
};
|
|
||||||
/**
|
|
||||||
* Adds/removes the content of the source file to the source map.
|
|
||||||
*/
|
|
||||||
function setSourceContent(map, source, content) {
|
|
||||||
const { _sources: sources, _sourcesContent: sourcesContent } = cast(map);
|
|
||||||
const index = put(sources, source);
|
|
||||||
sourcesContent[index] = content;
|
|
||||||
}
|
|
||||||
function setIgnore(map, source, ignore = true) {
|
|
||||||
const { _sources: sources, _sourcesContent: sourcesContent, _ignoreList: ignoreList } = cast(map);
|
|
||||||
const index = put(sources, source);
|
|
||||||
if (index === sourcesContent.length)
|
|
||||||
sourcesContent[index] = null;
|
|
||||||
if (ignore)
|
|
||||||
put(ignoreList, index);
|
|
||||||
else
|
|
||||||
remove(ignoreList, index);
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
|
|
||||||
* a sourcemap, or to JSON.stringify.
|
|
||||||
*/
|
|
||||||
function toDecodedMap(map) {
|
|
||||||
const { _mappings: mappings, _sources: sources, _sourcesContent: sourcesContent, _names: names, _ignoreList: ignoreList, } = cast(map);
|
|
||||||
removeEmptyFinalLines(mappings);
|
|
||||||
return {
|
|
||||||
version: 3,
|
|
||||||
file: map.file || undefined,
|
|
||||||
names: names.array,
|
|
||||||
sourceRoot: map.sourceRoot || undefined,
|
|
||||||
sources: sources.array,
|
|
||||||
sourcesContent,
|
|
||||||
mappings,
|
|
||||||
ignoreList: ignoreList.array,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
|
|
||||||
* a sourcemap, or to JSON.stringify.
|
|
||||||
*/
|
|
||||||
function toEncodedMap(map) {
|
|
||||||
const decoded = toDecodedMap(map);
|
|
||||||
return Object.assign(Object.assign({}, decoded), { mappings: encode(decoded.mappings) });
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Constructs a new GenMapping, using the already present mappings of the input.
|
|
||||||
*/
|
|
||||||
function fromMap(input) {
|
|
||||||
const map = new TraceMap(input);
|
|
||||||
const gen = new GenMapping({ file: map.file, sourceRoot: map.sourceRoot });
|
|
||||||
putAll(cast(gen)._names, map.names);
|
|
||||||
putAll(cast(gen)._sources, map.sources);
|
|
||||||
cast(gen)._sourcesContent = map.sourcesContent || map.sources.map(() => null);
|
|
||||||
cast(gen)._mappings = decodedMappings(map);
|
|
||||||
if (map.ignoreList)
|
|
||||||
putAll(cast(gen)._ignoreList, map.ignoreList);
|
|
||||||
return gen;
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Returns an array of high-level mapping objects for every recorded segment, which could then be
|
|
||||||
* passed to the `source-map` library.
|
|
||||||
*/
|
|
||||||
function allMappings(map) {
|
|
||||||
const out = [];
|
|
||||||
const { _mappings: mappings, _sources: sources, _names: names } = cast(map);
|
|
||||||
for (let i = 0; i < mappings.length; i++) {
|
|
||||||
const line = mappings[i];
|
|
||||||
for (let j = 0; j < line.length; j++) {
|
|
||||||
const seg = line[j];
|
|
||||||
const generated = { line: i + 1, column: seg[COLUMN] };
|
|
||||||
let source = undefined;
|
|
||||||
let original = undefined;
|
|
||||||
let name = undefined;
|
|
||||||
if (seg.length !== 1) {
|
|
||||||
source = sources.array[seg[SOURCES_INDEX]];
|
|
||||||
original = { line: seg[SOURCE_LINE] + 1, column: seg[SOURCE_COLUMN] };
|
|
||||||
if (seg.length === 5)
|
|
||||||
name = names.array[seg[NAMES_INDEX]];
|
|
||||||
}
|
|
||||||
out.push({ generated, source, original, name });
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return out;
|
|
||||||
}
|
|
||||||
// This split declaration is only so that terser can elminiate the static initialization block.
|
|
||||||
function addSegmentInternal(skipable, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) {
|
|
||||||
const { _mappings: mappings, _sources: sources, _sourcesContent: sourcesContent, _names: names, } = cast(map);
|
|
||||||
const line = getLine(mappings, genLine);
|
|
||||||
const index = getColumnIndex(line, genColumn);
|
|
||||||
if (!source) {
|
|
||||||
if (skipable && skipSourceless(line, index))
|
|
||||||
return;
|
|
||||||
return insert(line, index, [genColumn]);
|
|
||||||
}
|
|
||||||
const sourcesIndex = put(sources, source);
|
|
||||||
const namesIndex = name ? put(names, name) : NO_NAME;
|
|
||||||
if (sourcesIndex === sourcesContent.length)
|
|
||||||
sourcesContent[sourcesIndex] = content !== null && content !== void 0 ? content : null;
|
|
||||||
if (skipable && skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex)) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
return insert(line, index, name
|
|
||||||
? [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex]
|
|
||||||
: [genColumn, sourcesIndex, sourceLine, sourceColumn]);
|
|
||||||
}
|
|
||||||
function getLine(mappings, index) {
|
|
||||||
for (let i = mappings.length; i <= index; i++) {
|
|
||||||
mappings[i] = [];
|
|
||||||
}
|
|
||||||
return mappings[index];
|
|
||||||
}
|
|
||||||
function getColumnIndex(line, genColumn) {
|
|
||||||
let index = line.length;
|
|
||||||
for (let i = index - 1; i >= 0; index = i--) {
|
|
||||||
const current = line[i];
|
|
||||||
if (genColumn >= current[COLUMN])
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
return index;
|
|
||||||
}
|
|
||||||
function insert(array, index, value) {
|
|
||||||
for (let i = array.length; i > index; i--) {
|
|
||||||
array[i] = array[i - 1];
|
|
||||||
}
|
|
||||||
array[index] = value;
|
|
||||||
}
|
|
||||||
function removeEmptyFinalLines(mappings) {
|
|
||||||
const { length } = mappings;
|
|
||||||
let len = length;
|
|
||||||
for (let i = len - 1; i >= 0; len = i, i--) {
|
|
||||||
if (mappings[i].length > 0)
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
if (len < length)
|
|
||||||
mappings.length = len;
|
|
||||||
}
|
|
||||||
function putAll(setarr, array) {
|
|
||||||
for (let i = 0; i < array.length; i++)
|
|
||||||
put(setarr, array[i]);
|
|
||||||
}
|
|
||||||
function skipSourceless(line, index) {
|
|
||||||
// The start of a line is already sourceless, so adding a sourceless segment to the beginning
|
|
||||||
// doesn't generate any useful information.
|
|
||||||
if (index === 0)
|
|
||||||
return true;
|
|
||||||
const prev = line[index - 1];
|
|
||||||
// If the previous segment is also sourceless, then adding another sourceless segment doesn't
|
|
||||||
// genrate any new information. Else, this segment will end the source/named segment and point to
|
|
||||||
// a sourceless position, which is useful.
|
|
||||||
return prev.length === 1;
|
|
||||||
}
|
|
||||||
function skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex) {
|
|
||||||
// A source/named segment at the start of a line gives position at that genColumn
|
|
||||||
if (index === 0)
|
|
||||||
return false;
|
|
||||||
const prev = line[index - 1];
|
|
||||||
// If the previous segment is sourceless, then we're transitioning to a source.
|
|
||||||
if (prev.length === 1)
|
|
||||||
return false;
|
|
||||||
// If the previous segment maps to the exact same source position, then this segment doesn't
|
|
||||||
// provide any new position information.
|
|
||||||
return (sourcesIndex === prev[SOURCES_INDEX] &&
|
|
||||||
sourceLine === prev[SOURCE_LINE] &&
|
|
||||||
sourceColumn === prev[SOURCE_COLUMN] &&
|
|
||||||
namesIndex === (prev.length === 5 ? prev[NAMES_INDEX] : NO_NAME));
|
|
||||||
}
|
|
||||||
function addMappingInternal(skipable, map, mapping) {
|
|
||||||
const { generated, source, original, name, content } = mapping;
|
|
||||||
if (!source) {
|
|
||||||
return addSegmentInternal(skipable, map, generated.line - 1, generated.column, null, null, null, null, null);
|
|
||||||
}
|
|
||||||
return addSegmentInternal(skipable, map, generated.line - 1, generated.column, source, original.line - 1, original.column, name, content);
|
|
||||||
}
|
|
||||||
|
|
||||||
export { GenMapping, addMapping, addSegment, allMappings, fromMap, maybeAddMapping, maybeAddSegment, setIgnore, setSourceContent, toDecodedMap, toEncodedMap };
|
|
||||||
//# sourceMappingURL=gen-mapping.mjs.map
|
|
||||||
1
node_modules/@jridgewell/gen-mapping/dist/gen-mapping.mjs.map
generated
vendored
1
node_modules/@jridgewell/gen-mapping/dist/gen-mapping.mjs.map
generated
vendored
File diff suppressed because one or more lines are too long
246
node_modules/@jridgewell/gen-mapping/dist/gen-mapping.umd.js
generated
vendored
246
node_modules/@jridgewell/gen-mapping/dist/gen-mapping.umd.js
generated
vendored
@ -1,246 +0,0 @@
|
|||||||
(function (global, factory) {
|
|
||||||
typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports, require('@jridgewell/set-array'), require('@jridgewell/sourcemap-codec'), require('@jridgewell/trace-mapping')) :
|
|
||||||
typeof define === 'function' && define.amd ? define(['exports', '@jridgewell/set-array', '@jridgewell/sourcemap-codec', '@jridgewell/trace-mapping'], factory) :
|
|
||||||
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.genMapping = {}, global.setArray, global.sourcemapCodec, global.traceMapping));
|
|
||||||
})(this, (function (exports, setArray, sourcemapCodec, traceMapping) { 'use strict';
|
|
||||||
|
|
||||||
const COLUMN = 0;
|
|
||||||
const SOURCES_INDEX = 1;
|
|
||||||
const SOURCE_LINE = 2;
|
|
||||||
const SOURCE_COLUMN = 3;
|
|
||||||
const NAMES_INDEX = 4;
|
|
||||||
|
|
||||||
const NO_NAME = -1;
|
|
||||||
/**
|
|
||||||
* Provides the state to generate a sourcemap.
|
|
||||||
*/
|
|
||||||
class GenMapping {
|
|
||||||
constructor({ file, sourceRoot } = {}) {
|
|
||||||
this._names = new setArray.SetArray();
|
|
||||||
this._sources = new setArray.SetArray();
|
|
||||||
this._sourcesContent = [];
|
|
||||||
this._mappings = [];
|
|
||||||
this.file = file;
|
|
||||||
this.sourceRoot = sourceRoot;
|
|
||||||
this._ignoreList = new setArray.SetArray();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Typescript doesn't allow friend access to private fields, so this just casts the map into a type
|
|
||||||
* with public access modifiers.
|
|
||||||
*/
|
|
||||||
function cast(map) {
|
|
||||||
return map;
|
|
||||||
}
|
|
||||||
function addSegment(map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) {
|
|
||||||
return addSegmentInternal(false, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content);
|
|
||||||
}
|
|
||||||
function addMapping(map, mapping) {
|
|
||||||
return addMappingInternal(false, map, mapping);
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Same as `addSegment`, but will only add the segment if it generates useful information in the
|
|
||||||
* resulting map. This only works correctly if segments are added **in order**, meaning you should
|
|
||||||
* not add a segment with a lower generated line/column than one that came before.
|
|
||||||
*/
|
|
||||||
const maybeAddSegment = (map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) => {
|
|
||||||
return addSegmentInternal(true, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content);
|
|
||||||
};
|
|
||||||
/**
|
|
||||||
* Same as `addMapping`, but will only add the mapping if it generates useful information in the
|
|
||||||
* resulting map. This only works correctly if mappings are added **in order**, meaning you should
|
|
||||||
* not add a mapping with a lower generated line/column than one that came before.
|
|
||||||
*/
|
|
||||||
const maybeAddMapping = (map, mapping) => {
|
|
||||||
return addMappingInternal(true, map, mapping);
|
|
||||||
};
|
|
||||||
/**
|
|
||||||
* Adds/removes the content of the source file to the source map.
|
|
||||||
*/
|
|
||||||
function setSourceContent(map, source, content) {
|
|
||||||
const { _sources: sources, _sourcesContent: sourcesContent } = cast(map);
|
|
||||||
const index = setArray.put(sources, source);
|
|
||||||
sourcesContent[index] = content;
|
|
||||||
}
|
|
||||||
function setIgnore(map, source, ignore = true) {
|
|
||||||
const { _sources: sources, _sourcesContent: sourcesContent, _ignoreList: ignoreList } = cast(map);
|
|
||||||
const index = setArray.put(sources, source);
|
|
||||||
if (index === sourcesContent.length)
|
|
||||||
sourcesContent[index] = null;
|
|
||||||
if (ignore)
|
|
||||||
setArray.put(ignoreList, index);
|
|
||||||
else
|
|
||||||
setArray.remove(ignoreList, index);
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
|
|
||||||
* a sourcemap, or to JSON.stringify.
|
|
||||||
*/
|
|
||||||
function toDecodedMap(map) {
|
|
||||||
const { _mappings: mappings, _sources: sources, _sourcesContent: sourcesContent, _names: names, _ignoreList: ignoreList, } = cast(map);
|
|
||||||
removeEmptyFinalLines(mappings);
|
|
||||||
return {
|
|
||||||
version: 3,
|
|
||||||
file: map.file || undefined,
|
|
||||||
names: names.array,
|
|
||||||
sourceRoot: map.sourceRoot || undefined,
|
|
||||||
sources: sources.array,
|
|
||||||
sourcesContent,
|
|
||||||
mappings,
|
|
||||||
ignoreList: ignoreList.array,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
|
|
||||||
* a sourcemap, or to JSON.stringify.
|
|
||||||
*/
|
|
||||||
function toEncodedMap(map) {
|
|
||||||
const decoded = toDecodedMap(map);
|
|
||||||
return Object.assign(Object.assign({}, decoded), { mappings: sourcemapCodec.encode(decoded.mappings) });
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Constructs a new GenMapping, using the already present mappings of the input.
|
|
||||||
*/
|
|
||||||
function fromMap(input) {
|
|
||||||
const map = new traceMapping.TraceMap(input);
|
|
||||||
const gen = new GenMapping({ file: map.file, sourceRoot: map.sourceRoot });
|
|
||||||
putAll(cast(gen)._names, map.names);
|
|
||||||
putAll(cast(gen)._sources, map.sources);
|
|
||||||
cast(gen)._sourcesContent = map.sourcesContent || map.sources.map(() => null);
|
|
||||||
cast(gen)._mappings = traceMapping.decodedMappings(map);
|
|
||||||
if (map.ignoreList)
|
|
||||||
putAll(cast(gen)._ignoreList, map.ignoreList);
|
|
||||||
return gen;
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Returns an array of high-level mapping objects for every recorded segment, which could then be
|
|
||||||
* passed to the `source-map` library.
|
|
||||||
*/
|
|
||||||
function allMappings(map) {
|
|
||||||
const out = [];
|
|
||||||
const { _mappings: mappings, _sources: sources, _names: names } = cast(map);
|
|
||||||
for (let i = 0; i < mappings.length; i++) {
|
|
||||||
const line = mappings[i];
|
|
||||||
for (let j = 0; j < line.length; j++) {
|
|
||||||
const seg = line[j];
|
|
||||||
const generated = { line: i + 1, column: seg[COLUMN] };
|
|
||||||
let source = undefined;
|
|
||||||
let original = undefined;
|
|
||||||
let name = undefined;
|
|
||||||
if (seg.length !== 1) {
|
|
||||||
source = sources.array[seg[SOURCES_INDEX]];
|
|
||||||
original = { line: seg[SOURCE_LINE] + 1, column: seg[SOURCE_COLUMN] };
|
|
||||||
if (seg.length === 5)
|
|
||||||
name = names.array[seg[NAMES_INDEX]];
|
|
||||||
}
|
|
||||||
out.push({ generated, source, original, name });
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return out;
|
|
||||||
}
|
|
||||||
// This split declaration is only so that terser can elminiate the static initialization block.
|
|
||||||
function addSegmentInternal(skipable, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) {
|
|
||||||
const { _mappings: mappings, _sources: sources, _sourcesContent: sourcesContent, _names: names, } = cast(map);
|
|
||||||
const line = getLine(mappings, genLine);
|
|
||||||
const index = getColumnIndex(line, genColumn);
|
|
||||||
if (!source) {
|
|
||||||
if (skipable && skipSourceless(line, index))
|
|
||||||
return;
|
|
||||||
return insert(line, index, [genColumn]);
|
|
||||||
}
|
|
||||||
const sourcesIndex = setArray.put(sources, source);
|
|
||||||
const namesIndex = name ? setArray.put(names, name) : NO_NAME;
|
|
||||||
if (sourcesIndex === sourcesContent.length)
|
|
||||||
sourcesContent[sourcesIndex] = content !== null && content !== void 0 ? content : null;
|
|
||||||
if (skipable && skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex)) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
return insert(line, index, name
|
|
||||||
? [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex]
|
|
||||||
: [genColumn, sourcesIndex, sourceLine, sourceColumn]);
|
|
||||||
}
|
|
||||||
function getLine(mappings, index) {
|
|
||||||
for (let i = mappings.length; i <= index; i++) {
|
|
||||||
mappings[i] = [];
|
|
||||||
}
|
|
||||||
return mappings[index];
|
|
||||||
}
|
|
||||||
function getColumnIndex(line, genColumn) {
|
|
||||||
let index = line.length;
|
|
||||||
for (let i = index - 1; i >= 0; index = i--) {
|
|
||||||
const current = line[i];
|
|
||||||
if (genColumn >= current[COLUMN])
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
return index;
|
|
||||||
}
|
|
||||||
function insert(array, index, value) {
|
|
||||||
for (let i = array.length; i > index; i--) {
|
|
||||||
array[i] = array[i - 1];
|
|
||||||
}
|
|
||||||
array[index] = value;
|
|
||||||
}
|
|
||||||
function removeEmptyFinalLines(mappings) {
|
|
||||||
const { length } = mappings;
|
|
||||||
let len = length;
|
|
||||||
for (let i = len - 1; i >= 0; len = i, i--) {
|
|
||||||
if (mappings[i].length > 0)
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
if (len < length)
|
|
||||||
mappings.length = len;
|
|
||||||
}
|
|
||||||
function putAll(setarr, array) {
|
|
||||||
for (let i = 0; i < array.length; i++)
|
|
||||||
setArray.put(setarr, array[i]);
|
|
||||||
}
|
|
||||||
function skipSourceless(line, index) {
|
|
||||||
// The start of a line is already sourceless, so adding a sourceless segment to the beginning
|
|
||||||
// doesn't generate any useful information.
|
|
||||||
if (index === 0)
|
|
||||||
return true;
|
|
||||||
const prev = line[index - 1];
|
|
||||||
// If the previous segment is also sourceless, then adding another sourceless segment doesn't
|
|
||||||
// genrate any new information. Else, this segment will end the source/named segment and point to
|
|
||||||
// a sourceless position, which is useful.
|
|
||||||
return prev.length === 1;
|
|
||||||
}
|
|
||||||
function skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex) {
|
|
||||||
// A source/named segment at the start of a line gives position at that genColumn
|
|
||||||
if (index === 0)
|
|
||||||
return false;
|
|
||||||
const prev = line[index - 1];
|
|
||||||
// If the previous segment is sourceless, then we're transitioning to a source.
|
|
||||||
if (prev.length === 1)
|
|
||||||
return false;
|
|
||||||
// If the previous segment maps to the exact same source position, then this segment doesn't
|
|
||||||
// provide any new position information.
|
|
||||||
return (sourcesIndex === prev[SOURCES_INDEX] &&
|
|
||||||
sourceLine === prev[SOURCE_LINE] &&
|
|
||||||
sourceColumn === prev[SOURCE_COLUMN] &&
|
|
||||||
namesIndex === (prev.length === 5 ? prev[NAMES_INDEX] : NO_NAME));
|
|
||||||
}
|
|
||||||
function addMappingInternal(skipable, map, mapping) {
|
|
||||||
const { generated, source, original, name, content } = mapping;
|
|
||||||
if (!source) {
|
|
||||||
return addSegmentInternal(skipable, map, generated.line - 1, generated.column, null, null, null, null, null);
|
|
||||||
}
|
|
||||||
return addSegmentInternal(skipable, map, generated.line - 1, generated.column, source, original.line - 1, original.column, name, content);
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.GenMapping = GenMapping;
|
|
||||||
exports.addMapping = addMapping;
|
|
||||||
exports.addSegment = addSegment;
|
|
||||||
exports.allMappings = allMappings;
|
|
||||||
exports.fromMap = fromMap;
|
|
||||||
exports.maybeAddMapping = maybeAddMapping;
|
|
||||||
exports.maybeAddSegment = maybeAddSegment;
|
|
||||||
exports.setIgnore = setIgnore;
|
|
||||||
exports.setSourceContent = setSourceContent;
|
|
||||||
exports.toDecodedMap = toDecodedMap;
|
|
||||||
exports.toEncodedMap = toEncodedMap;
|
|
||||||
|
|
||||||
Object.defineProperty(exports, '__esModule', { value: true });
|
|
||||||
|
|
||||||
}));
|
|
||||||
//# sourceMappingURL=gen-mapping.umd.js.map
|
|
||||||
1
node_modules/@jridgewell/gen-mapping/dist/gen-mapping.umd.js.map
generated
vendored
1
node_modules/@jridgewell/gen-mapping/dist/gen-mapping.umd.js.map
generated
vendored
File diff suppressed because one or more lines are too long
88
node_modules/@jridgewell/gen-mapping/dist/types/gen-mapping.d.ts
generated
vendored
88
node_modules/@jridgewell/gen-mapping/dist/types/gen-mapping.d.ts
generated
vendored
@ -1,88 +0,0 @@
|
|||||||
import type { SourceMapInput } from '@jridgewell/trace-mapping';
|
|
||||||
import type { DecodedSourceMap, EncodedSourceMap, Pos, Mapping } from './types';
|
|
||||||
export type { DecodedSourceMap, EncodedSourceMap, Mapping };
|
|
||||||
export declare type Options = {
|
|
||||||
file?: string | null;
|
|
||||||
sourceRoot?: string | null;
|
|
||||||
};
|
|
||||||
/**
|
|
||||||
* Provides the state to generate a sourcemap.
|
|
||||||
*/
|
|
||||||
export declare class GenMapping {
|
|
||||||
private _names;
|
|
||||||
private _sources;
|
|
||||||
private _sourcesContent;
|
|
||||||
private _mappings;
|
|
||||||
private _ignoreList;
|
|
||||||
file: string | null | undefined;
|
|
||||||
sourceRoot: string | null | undefined;
|
|
||||||
constructor({ file, sourceRoot }?: Options);
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* A low-level API to associate a generated position with an original source position. Line and
|
|
||||||
* column here are 0-based, unlike `addMapping`.
|
|
||||||
*/
|
|
||||||
export declare function addSegment(map: GenMapping, genLine: number, genColumn: number, source?: null, sourceLine?: null, sourceColumn?: null, name?: null, content?: null): void;
|
|
||||||
export declare function addSegment(map: GenMapping, genLine: number, genColumn: number, source: string, sourceLine: number, sourceColumn: number, name?: null, content?: string | null): void;
|
|
||||||
export declare function addSegment(map: GenMapping, genLine: number, genColumn: number, source: string, sourceLine: number, sourceColumn: number, name: string, content?: string | null): void;
|
|
||||||
/**
|
|
||||||
* A high-level API to associate a generated position with an original source position. Line is
|
|
||||||
* 1-based, but column is 0-based, due to legacy behavior in `source-map` library.
|
|
||||||
*/
|
|
||||||
export declare function addMapping(map: GenMapping, mapping: {
|
|
||||||
generated: Pos;
|
|
||||||
source?: null;
|
|
||||||
original?: null;
|
|
||||||
name?: null;
|
|
||||||
content?: null;
|
|
||||||
}): void;
|
|
||||||
export declare function addMapping(map: GenMapping, mapping: {
|
|
||||||
generated: Pos;
|
|
||||||
source: string;
|
|
||||||
original: Pos;
|
|
||||||
name?: null;
|
|
||||||
content?: string | null;
|
|
||||||
}): void;
|
|
||||||
export declare function addMapping(map: GenMapping, mapping: {
|
|
||||||
generated: Pos;
|
|
||||||
source: string;
|
|
||||||
original: Pos;
|
|
||||||
name: string;
|
|
||||||
content?: string | null;
|
|
||||||
}): void;
|
|
||||||
/**
|
|
||||||
* Same as `addSegment`, but will only add the segment if it generates useful information in the
|
|
||||||
* resulting map. This only works correctly if segments are added **in order**, meaning you should
|
|
||||||
* not add a segment with a lower generated line/column than one that came before.
|
|
||||||
*/
|
|
||||||
export declare const maybeAddSegment: typeof addSegment;
|
|
||||||
/**
|
|
||||||
* Same as `addMapping`, but will only add the mapping if it generates useful information in the
|
|
||||||
* resulting map. This only works correctly if mappings are added **in order**, meaning you should
|
|
||||||
* not add a mapping with a lower generated line/column than one that came before.
|
|
||||||
*/
|
|
||||||
export declare const maybeAddMapping: typeof addMapping;
|
|
||||||
/**
|
|
||||||
* Adds/removes the content of the source file to the source map.
|
|
||||||
*/
|
|
||||||
export declare function setSourceContent(map: GenMapping, source: string, content: string | null): void;
|
|
||||||
export declare function setIgnore(map: GenMapping, source: string, ignore?: boolean): void;
|
|
||||||
/**
|
|
||||||
* Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
|
|
||||||
* a sourcemap, or to JSON.stringify.
|
|
||||||
*/
|
|
||||||
export declare function toDecodedMap(map: GenMapping): DecodedSourceMap;
|
|
||||||
/**
|
|
||||||
* Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
|
|
||||||
* a sourcemap, or to JSON.stringify.
|
|
||||||
*/
|
|
||||||
export declare function toEncodedMap(map: GenMapping): EncodedSourceMap;
|
|
||||||
/**
|
|
||||||
* Constructs a new GenMapping, using the already present mappings of the input.
|
|
||||||
*/
|
|
||||||
export declare function fromMap(input: SourceMapInput): GenMapping;
|
|
||||||
/**
|
|
||||||
* Returns an array of high-level mapping objects for every recorded segment, which could then be
|
|
||||||
* passed to the `source-map` library.
|
|
||||||
*/
|
|
||||||
export declare function allMappings(map: GenMapping): Mapping[];
|
|
||||||
12
node_modules/@jridgewell/gen-mapping/dist/types/sourcemap-segment.d.ts
generated
vendored
12
node_modules/@jridgewell/gen-mapping/dist/types/sourcemap-segment.d.ts
generated
vendored
@ -1,12 +0,0 @@
|
|||||||
declare type GeneratedColumn = number;
|
|
||||||
declare type SourcesIndex = number;
|
|
||||||
declare type SourceLine = number;
|
|
||||||
declare type SourceColumn = number;
|
|
||||||
declare type NamesIndex = number;
|
|
||||||
export declare type SourceMapSegment = [GeneratedColumn] | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn] | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn, NamesIndex];
|
|
||||||
export declare const COLUMN = 0;
|
|
||||||
export declare const SOURCES_INDEX = 1;
|
|
||||||
export declare const SOURCE_LINE = 2;
|
|
||||||
export declare const SOURCE_COLUMN = 3;
|
|
||||||
export declare const NAMES_INDEX = 4;
|
|
||||||
export {};
|
|
||||||
36
node_modules/@jridgewell/gen-mapping/dist/types/types.d.ts
generated
vendored
36
node_modules/@jridgewell/gen-mapping/dist/types/types.d.ts
generated
vendored
@ -1,36 +0,0 @@
|
|||||||
import type { SourceMapSegment } from './sourcemap-segment';
|
|
||||||
export interface SourceMapV3 {
|
|
||||||
file?: string | null;
|
|
||||||
names: readonly string[];
|
|
||||||
sourceRoot?: string;
|
|
||||||
sources: readonly (string | null)[];
|
|
||||||
sourcesContent?: readonly (string | null)[];
|
|
||||||
version: 3;
|
|
||||||
ignoreList?: readonly number[];
|
|
||||||
}
|
|
||||||
export interface EncodedSourceMap extends SourceMapV3 {
|
|
||||||
mappings: string;
|
|
||||||
}
|
|
||||||
export interface DecodedSourceMap extends SourceMapV3 {
|
|
||||||
mappings: readonly SourceMapSegment[][];
|
|
||||||
}
|
|
||||||
export interface Pos {
|
|
||||||
line: number;
|
|
||||||
column: number;
|
|
||||||
}
|
|
||||||
export declare type Mapping = {
|
|
||||||
generated: Pos;
|
|
||||||
source: undefined;
|
|
||||||
original: undefined;
|
|
||||||
name: undefined;
|
|
||||||
} | {
|
|
||||||
generated: Pos;
|
|
||||||
source: string;
|
|
||||||
original: Pos;
|
|
||||||
name: string;
|
|
||||||
} | {
|
|
||||||
generated: Pos;
|
|
||||||
source: string;
|
|
||||||
original: Pos;
|
|
||||||
name: undefined;
|
|
||||||
};
|
|
||||||
76
node_modules/@jridgewell/gen-mapping/package.json
generated
vendored
76
node_modules/@jridgewell/gen-mapping/package.json
generated
vendored
@ -1,76 +0,0 @@
|
|||||||
{
|
|
||||||
"name": "@jridgewell/gen-mapping",
|
|
||||||
"version": "0.3.8",
|
|
||||||
"description": "Generate source maps",
|
|
||||||
"keywords": [
|
|
||||||
"source",
|
|
||||||
"map"
|
|
||||||
],
|
|
||||||
"author": "Justin Ridgewell <justin@ridgewell.name>",
|
|
||||||
"license": "MIT",
|
|
||||||
"repository": "https://github.com/jridgewell/gen-mapping",
|
|
||||||
"main": "dist/gen-mapping.umd.js",
|
|
||||||
"module": "dist/gen-mapping.mjs",
|
|
||||||
"types": "dist/types/gen-mapping.d.ts",
|
|
||||||
"exports": {
|
|
||||||
".": [
|
|
||||||
{
|
|
||||||
"types": "./dist/types/gen-mapping.d.ts",
|
|
||||||
"browser": "./dist/gen-mapping.umd.js",
|
|
||||||
"require": "./dist/gen-mapping.umd.js",
|
|
||||||
"import": "./dist/gen-mapping.mjs"
|
|
||||||
},
|
|
||||||
"./dist/gen-mapping.umd.js"
|
|
||||||
],
|
|
||||||
"./package.json": "./package.json"
|
|
||||||
},
|
|
||||||
"files": [
|
|
||||||
"dist"
|
|
||||||
],
|
|
||||||
"engines": {
|
|
||||||
"node": ">=6.0.0"
|
|
||||||
},
|
|
||||||
"scripts": {
|
|
||||||
"benchmark": "run-s build:rollup benchmark:*",
|
|
||||||
"benchmark:install": "cd benchmark && npm install",
|
|
||||||
"benchmark:only": "node benchmark/index.mjs",
|
|
||||||
"prebuild": "rm -rf dist",
|
|
||||||
"build": "run-s -n build:*",
|
|
||||||
"build:rollup": "rollup -c rollup.config.js",
|
|
||||||
"build:ts": "tsc --project tsconfig.build.json",
|
|
||||||
"lint": "run-s -n lint:*",
|
|
||||||
"lint:prettier": "npm run test:lint:prettier -- --write",
|
|
||||||
"lint:ts": "npm run test:lint:ts -- --fix",
|
|
||||||
"test": "run-s -n test:lint test:only",
|
|
||||||
"test:debug": "mocha --inspect-brk",
|
|
||||||
"test:lint": "run-s -n test:lint:*",
|
|
||||||
"test:lint:prettier": "prettier --check '{src,test}/**/*.ts'",
|
|
||||||
"test:lint:ts": "eslint '{src,test}/**/*.ts'",
|
|
||||||
"test:only": "c8 mocha",
|
|
||||||
"test:watch": "mocha --watch",
|
|
||||||
"prepublishOnly": "npm run preversion",
|
|
||||||
"preversion": "run-s test build"
|
|
||||||
},
|
|
||||||
"devDependencies": {
|
|
||||||
"@rollup/plugin-typescript": "8.3.2",
|
|
||||||
"@types/mocha": "9.1.1",
|
|
||||||
"@types/node": "17.0.29",
|
|
||||||
"@typescript-eslint/eslint-plugin": "5.21.0",
|
|
||||||
"@typescript-eslint/parser": "5.21.0",
|
|
||||||
"benchmark": "2.1.4",
|
|
||||||
"c8": "7.11.2",
|
|
||||||
"eslint": "8.14.0",
|
|
||||||
"eslint-config-prettier": "8.5.0",
|
|
||||||
"mocha": "9.2.2",
|
|
||||||
"npm-run-all": "4.1.5",
|
|
||||||
"prettier": "2.6.2",
|
|
||||||
"rollup": "2.70.2",
|
|
||||||
"tsx": "4.7.1",
|
|
||||||
"typescript": "4.6.3"
|
|
||||||
},
|
|
||||||
"dependencies": {
|
|
||||||
"@jridgewell/set-array": "^1.2.1",
|
|
||||||
"@jridgewell/sourcemap-codec": "^1.4.10",
|
|
||||||
"@jridgewell/trace-mapping": "^0.3.24"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
19
node_modules/@jridgewell/resolve-uri/LICENSE
generated
vendored
19
node_modules/@jridgewell/resolve-uri/LICENSE
generated
vendored
@ -1,19 +0,0 @@
|
|||||||
Copyright 2019 Justin Ridgewell <jridgewell@google.com>
|
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
||||||
of this software and associated documentation files (the "Software"), to deal
|
|
||||||
in the Software without restriction, including without limitation the rights
|
|
||||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
||||||
copies of the Software, and to permit persons to whom the Software is
|
|
||||||
furnished to do so, subject to the following conditions:
|
|
||||||
|
|
||||||
The above copyright notice and this permission notice shall be included in
|
|
||||||
all copies or substantial portions of the Software.
|
|
||||||
|
|
||||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
||||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
||||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
||||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
||||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
||||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
||||||
SOFTWARE.
|
|
||||||
40
node_modules/@jridgewell/resolve-uri/README.md
generated
vendored
40
node_modules/@jridgewell/resolve-uri/README.md
generated
vendored
@ -1,40 +0,0 @@
|
|||||||
# @jridgewell/resolve-uri
|
|
||||||
|
|
||||||
> Resolve a URI relative to an optional base URI
|
|
||||||
|
|
||||||
Resolve any combination of absolute URIs, protocol-realtive URIs, absolute paths, or relative paths.
|
|
||||||
|
|
||||||
## Installation
|
|
||||||
|
|
||||||
```sh
|
|
||||||
npm install @jridgewell/resolve-uri
|
|
||||||
```
|
|
||||||
|
|
||||||
## Usage
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
function resolve(input: string, base?: string): string;
|
|
||||||
```
|
|
||||||
|
|
||||||
```js
|
|
||||||
import resolve from '@jridgewell/resolve-uri';
|
|
||||||
|
|
||||||
resolve('foo', 'https://example.com'); // => 'https://example.com/foo'
|
|
||||||
```
|
|
||||||
|
|
||||||
| Input | Base | Resolution | Explanation |
|
|
||||||
|-----------------------|-------------------------|--------------------------------|--------------------------------------------------------------|
|
|
||||||
| `https://example.com` | _any_ | `https://example.com/` | Input is normalized only |
|
|
||||||
| `//example.com` | `https://base.com/` | `https://example.com/` | Input inherits the base's protocol |
|
|
||||||
| `//example.com` | _rest_ | `//example.com/` | Input is normalized only |
|
|
||||||
| `/example` | `https://base.com/` | `https://base.com/example` | Input inherits the base's origin |
|
|
||||||
| `/example` | `//base.com/` | `//base.com/example` | Input inherits the base's host and remains protocol relative |
|
|
||||||
| `/example` | _rest_ | `/example` | Input is normalized only |
|
|
||||||
| `example` | `https://base.com/dir/` | `https://base.com/dir/example` | Input is joined with the base |
|
|
||||||
| `example` | `https://base.com/file` | `https://base.com/example` | Input is joined with the base without its file |
|
|
||||||
| `example` | `//base.com/dir/` | `//base.com/dir/example` | Input is joined with the base's last directory |
|
|
||||||
| `example` | `//base.com/file` | `//base.com/example` | Input is joined with the base without its file |
|
|
||||||
| `example` | `/base/dir/` | `/base/dir/example` | Input is joined with the base's last directory |
|
|
||||||
| `example` | `/base/file` | `/base/example` | Input is joined with the base without its file |
|
|
||||||
| `example` | `base/dir/` | `base/dir/example` | Input is joined with the base's last directory |
|
|
||||||
| `example` | `base/file` | `base/example` | Input is joined with the base without its file |
|
|
||||||
232
node_modules/@jridgewell/resolve-uri/dist/resolve-uri.mjs
generated
vendored
232
node_modules/@jridgewell/resolve-uri/dist/resolve-uri.mjs
generated
vendored
@ -1,232 +0,0 @@
|
|||||||
// Matches the scheme of a URL, eg "http://"
|
|
||||||
const schemeRegex = /^[\w+.-]+:\/\//;
|
|
||||||
/**
|
|
||||||
* Matches the parts of a URL:
|
|
||||||
* 1. Scheme, including ":", guaranteed.
|
|
||||||
* 2. User/password, including "@", optional.
|
|
||||||
* 3. Host, guaranteed.
|
|
||||||
* 4. Port, including ":", optional.
|
|
||||||
* 5. Path, including "/", optional.
|
|
||||||
* 6. Query, including "?", optional.
|
|
||||||
* 7. Hash, including "#", optional.
|
|
||||||
*/
|
|
||||||
const urlRegex = /^([\w+.-]+:)\/\/([^@/#?]*@)?([^:/#?]*)(:\d+)?(\/[^#?]*)?(\?[^#]*)?(#.*)?/;
|
|
||||||
/**
|
|
||||||
* File URLs are weird. They dont' need the regular `//` in the scheme, they may or may not start
|
|
||||||
* with a leading `/`, they can have a domain (but only if they don't start with a Windows drive).
|
|
||||||
*
|
|
||||||
* 1. Host, optional.
|
|
||||||
* 2. Path, which may include "/", guaranteed.
|
|
||||||
* 3. Query, including "?", optional.
|
|
||||||
* 4. Hash, including "#", optional.
|
|
||||||
*/
|
|
||||||
const fileRegex = /^file:(?:\/\/((?![a-z]:)[^/#?]*)?)?(\/?[^#?]*)(\?[^#]*)?(#.*)?/i;
|
|
||||||
function isAbsoluteUrl(input) {
|
|
||||||
return schemeRegex.test(input);
|
|
||||||
}
|
|
||||||
function isSchemeRelativeUrl(input) {
|
|
||||||
return input.startsWith('//');
|
|
||||||
}
|
|
||||||
function isAbsolutePath(input) {
|
|
||||||
return input.startsWith('/');
|
|
||||||
}
|
|
||||||
function isFileUrl(input) {
|
|
||||||
return input.startsWith('file:');
|
|
||||||
}
|
|
||||||
function isRelative(input) {
|
|
||||||
return /^[.?#]/.test(input);
|
|
||||||
}
|
|
||||||
function parseAbsoluteUrl(input) {
|
|
||||||
const match = urlRegex.exec(input);
|
|
||||||
return makeUrl(match[1], match[2] || '', match[3], match[4] || '', match[5] || '/', match[6] || '', match[7] || '');
|
|
||||||
}
|
|
||||||
function parseFileUrl(input) {
|
|
||||||
const match = fileRegex.exec(input);
|
|
||||||
const path = match[2];
|
|
||||||
return makeUrl('file:', '', match[1] || '', '', isAbsolutePath(path) ? path : '/' + path, match[3] || '', match[4] || '');
|
|
||||||
}
|
|
||||||
function makeUrl(scheme, user, host, port, path, query, hash) {
|
|
||||||
return {
|
|
||||||
scheme,
|
|
||||||
user,
|
|
||||||
host,
|
|
||||||
port,
|
|
||||||
path,
|
|
||||||
query,
|
|
||||||
hash,
|
|
||||||
type: 7 /* Absolute */,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
function parseUrl(input) {
|
|
||||||
if (isSchemeRelativeUrl(input)) {
|
|
||||||
const url = parseAbsoluteUrl('http:' + input);
|
|
||||||
url.scheme = '';
|
|
||||||
url.type = 6 /* SchemeRelative */;
|
|
||||||
return url;
|
|
||||||
}
|
|
||||||
if (isAbsolutePath(input)) {
|
|
||||||
const url = parseAbsoluteUrl('http://foo.com' + input);
|
|
||||||
url.scheme = '';
|
|
||||||
url.host = '';
|
|
||||||
url.type = 5 /* AbsolutePath */;
|
|
||||||
return url;
|
|
||||||
}
|
|
||||||
if (isFileUrl(input))
|
|
||||||
return parseFileUrl(input);
|
|
||||||
if (isAbsoluteUrl(input))
|
|
||||||
return parseAbsoluteUrl(input);
|
|
||||||
const url = parseAbsoluteUrl('http://foo.com/' + input);
|
|
||||||
url.scheme = '';
|
|
||||||
url.host = '';
|
|
||||||
url.type = input
|
|
||||||
? input.startsWith('?')
|
|
||||||
? 3 /* Query */
|
|
||||||
: input.startsWith('#')
|
|
||||||
? 2 /* Hash */
|
|
||||||
: 4 /* RelativePath */
|
|
||||||
: 1 /* Empty */;
|
|
||||||
return url;
|
|
||||||
}
|
|
||||||
function stripPathFilename(path) {
|
|
||||||
// If a path ends with a parent directory "..", then it's a relative path with excess parent
|
|
||||||
// paths. It's not a file, so we can't strip it.
|
|
||||||
if (path.endsWith('/..'))
|
|
||||||
return path;
|
|
||||||
const index = path.lastIndexOf('/');
|
|
||||||
return path.slice(0, index + 1);
|
|
||||||
}
|
|
||||||
function mergePaths(url, base) {
|
|
||||||
normalizePath(base, base.type);
|
|
||||||
// If the path is just a "/", then it was an empty path to begin with (remember, we're a relative
|
|
||||||
// path).
|
|
||||||
if (url.path === '/') {
|
|
||||||
url.path = base.path;
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
// Resolution happens relative to the base path's directory, not the file.
|
|
||||||
url.path = stripPathFilename(base.path) + url.path;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* The path can have empty directories "//", unneeded parents "foo/..", or current directory
|
|
||||||
* "foo/.". We need to normalize to a standard representation.
|
|
||||||
*/
|
|
||||||
function normalizePath(url, type) {
|
|
||||||
const rel = type <= 4 /* RelativePath */;
|
|
||||||
const pieces = url.path.split('/');
|
|
||||||
// We need to preserve the first piece always, so that we output a leading slash. The item at
|
|
||||||
// pieces[0] is an empty string.
|
|
||||||
let pointer = 1;
|
|
||||||
// Positive is the number of real directories we've output, used for popping a parent directory.
|
|
||||||
// Eg, "foo/bar/.." will have a positive 2, and we can decrement to be left with just "foo".
|
|
||||||
let positive = 0;
|
|
||||||
// We need to keep a trailing slash if we encounter an empty directory (eg, splitting "foo/" will
|
|
||||||
// generate `["foo", ""]` pieces). And, if we pop a parent directory. But once we encounter a
|
|
||||||
// real directory, we won't need to append, unless the other conditions happen again.
|
|
||||||
let addTrailingSlash = false;
|
|
||||||
for (let i = 1; i < pieces.length; i++) {
|
|
||||||
const piece = pieces[i];
|
|
||||||
// An empty directory, could be a trailing slash, or just a double "//" in the path.
|
|
||||||
if (!piece) {
|
|
||||||
addTrailingSlash = true;
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
// If we encounter a real directory, then we don't need to append anymore.
|
|
||||||
addTrailingSlash = false;
|
|
||||||
// A current directory, which we can always drop.
|
|
||||||
if (piece === '.')
|
|
||||||
continue;
|
|
||||||
// A parent directory, we need to see if there are any real directories we can pop. Else, we
|
|
||||||
// have an excess of parents, and we'll need to keep the "..".
|
|
||||||
if (piece === '..') {
|
|
||||||
if (positive) {
|
|
||||||
addTrailingSlash = true;
|
|
||||||
positive--;
|
|
||||||
pointer--;
|
|
||||||
}
|
|
||||||
else if (rel) {
|
|
||||||
// If we're in a relativePath, then we need to keep the excess parents. Else, in an absolute
|
|
||||||
// URL, protocol relative URL, or an absolute path, we don't need to keep excess.
|
|
||||||
pieces[pointer++] = piece;
|
|
||||||
}
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
// We've encountered a real directory. Move it to the next insertion pointer, which accounts for
|
|
||||||
// any popped or dropped directories.
|
|
||||||
pieces[pointer++] = piece;
|
|
||||||
positive++;
|
|
||||||
}
|
|
||||||
let path = '';
|
|
||||||
for (let i = 1; i < pointer; i++) {
|
|
||||||
path += '/' + pieces[i];
|
|
||||||
}
|
|
||||||
if (!path || (addTrailingSlash && !path.endsWith('/..'))) {
|
|
||||||
path += '/';
|
|
||||||
}
|
|
||||||
url.path = path;
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Attempts to resolve `input` URL/path relative to `base`.
|
|
||||||
*/
|
|
||||||
function resolve(input, base) {
|
|
||||||
if (!input && !base)
|
|
||||||
return '';
|
|
||||||
const url = parseUrl(input);
|
|
||||||
let inputType = url.type;
|
|
||||||
if (base && inputType !== 7 /* Absolute */) {
|
|
||||||
const baseUrl = parseUrl(base);
|
|
||||||
const baseType = baseUrl.type;
|
|
||||||
switch (inputType) {
|
|
||||||
case 1 /* Empty */:
|
|
||||||
url.hash = baseUrl.hash;
|
|
||||||
// fall through
|
|
||||||
case 2 /* Hash */:
|
|
||||||
url.query = baseUrl.query;
|
|
||||||
// fall through
|
|
||||||
case 3 /* Query */:
|
|
||||||
case 4 /* RelativePath */:
|
|
||||||
mergePaths(url, baseUrl);
|
|
||||||
// fall through
|
|
||||||
case 5 /* AbsolutePath */:
|
|
||||||
// The host, user, and port are joined, you can't copy one without the others.
|
|
||||||
url.user = baseUrl.user;
|
|
||||||
url.host = baseUrl.host;
|
|
||||||
url.port = baseUrl.port;
|
|
||||||
// fall through
|
|
||||||
case 6 /* SchemeRelative */:
|
|
||||||
// The input doesn't have a schema at least, so we need to copy at least that over.
|
|
||||||
url.scheme = baseUrl.scheme;
|
|
||||||
}
|
|
||||||
if (baseType > inputType)
|
|
||||||
inputType = baseType;
|
|
||||||
}
|
|
||||||
normalizePath(url, inputType);
|
|
||||||
const queryHash = url.query + url.hash;
|
|
||||||
switch (inputType) {
|
|
||||||
// This is impossible, because of the empty checks at the start of the function.
|
|
||||||
// case UrlType.Empty:
|
|
||||||
case 2 /* Hash */:
|
|
||||||
case 3 /* Query */:
|
|
||||||
return queryHash;
|
|
||||||
case 4 /* RelativePath */: {
|
|
||||||
// The first char is always a "/", and we need it to be relative.
|
|
||||||
const path = url.path.slice(1);
|
|
||||||
if (!path)
|
|
||||||
return queryHash || '.';
|
|
||||||
if (isRelative(base || input) && !isRelative(path)) {
|
|
||||||
// If base started with a leading ".", or there is no base and input started with a ".",
|
|
||||||
// then we need to ensure that the relative path starts with a ".". We don't know if
|
|
||||||
// relative starts with a "..", though, so check before prepending.
|
|
||||||
return './' + path + queryHash;
|
|
||||||
}
|
|
||||||
return path + queryHash;
|
|
||||||
}
|
|
||||||
case 5 /* AbsolutePath */:
|
|
||||||
return url.path + queryHash;
|
|
||||||
default:
|
|
||||||
return url.scheme + '//' + url.user + url.host + url.port + url.path + queryHash;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export { resolve as default };
|
|
||||||
//# sourceMappingURL=resolve-uri.mjs.map
|
|
||||||
1
node_modules/@jridgewell/resolve-uri/dist/resolve-uri.mjs.map
generated
vendored
1
node_modules/@jridgewell/resolve-uri/dist/resolve-uri.mjs.map
generated
vendored
File diff suppressed because one or more lines are too long
240
node_modules/@jridgewell/resolve-uri/dist/resolve-uri.umd.js
generated
vendored
240
node_modules/@jridgewell/resolve-uri/dist/resolve-uri.umd.js
generated
vendored
@ -1,240 +0,0 @@
|
|||||||
(function (global, factory) {
|
|
||||||
typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory() :
|
|
||||||
typeof define === 'function' && define.amd ? define(factory) :
|
|
||||||
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, global.resolveURI = factory());
|
|
||||||
})(this, (function () { 'use strict';
|
|
||||||
|
|
||||||
// Matches the scheme of a URL, eg "http://"
|
|
||||||
const schemeRegex = /^[\w+.-]+:\/\//;
|
|
||||||
/**
|
|
||||||
* Matches the parts of a URL:
|
|
||||||
* 1. Scheme, including ":", guaranteed.
|
|
||||||
* 2. User/password, including "@", optional.
|
|
||||||
* 3. Host, guaranteed.
|
|
||||||
* 4. Port, including ":", optional.
|
|
||||||
* 5. Path, including "/", optional.
|
|
||||||
* 6. Query, including "?", optional.
|
|
||||||
* 7. Hash, including "#", optional.
|
|
||||||
*/
|
|
||||||
const urlRegex = /^([\w+.-]+:)\/\/([^@/#?]*@)?([^:/#?]*)(:\d+)?(\/[^#?]*)?(\?[^#]*)?(#.*)?/;
|
|
||||||
/**
|
|
||||||
* File URLs are weird. They dont' need the regular `//` in the scheme, they may or may not start
|
|
||||||
* with a leading `/`, they can have a domain (but only if they don't start with a Windows drive).
|
|
||||||
*
|
|
||||||
* 1. Host, optional.
|
|
||||||
* 2. Path, which may include "/", guaranteed.
|
|
||||||
* 3. Query, including "?", optional.
|
|
||||||
* 4. Hash, including "#", optional.
|
|
||||||
*/
|
|
||||||
const fileRegex = /^file:(?:\/\/((?![a-z]:)[^/#?]*)?)?(\/?[^#?]*)(\?[^#]*)?(#.*)?/i;
|
|
||||||
function isAbsoluteUrl(input) {
|
|
||||||
return schemeRegex.test(input);
|
|
||||||
}
|
|
||||||
function isSchemeRelativeUrl(input) {
|
|
||||||
return input.startsWith('//');
|
|
||||||
}
|
|
||||||
function isAbsolutePath(input) {
|
|
||||||
return input.startsWith('/');
|
|
||||||
}
|
|
||||||
function isFileUrl(input) {
|
|
||||||
return input.startsWith('file:');
|
|
||||||
}
|
|
||||||
function isRelative(input) {
|
|
||||||
return /^[.?#]/.test(input);
|
|
||||||
}
|
|
||||||
function parseAbsoluteUrl(input) {
|
|
||||||
const match = urlRegex.exec(input);
|
|
||||||
return makeUrl(match[1], match[2] || '', match[3], match[4] || '', match[5] || '/', match[6] || '', match[7] || '');
|
|
||||||
}
|
|
||||||
function parseFileUrl(input) {
|
|
||||||
const match = fileRegex.exec(input);
|
|
||||||
const path = match[2];
|
|
||||||
return makeUrl('file:', '', match[1] || '', '', isAbsolutePath(path) ? path : '/' + path, match[3] || '', match[4] || '');
|
|
||||||
}
|
|
||||||
function makeUrl(scheme, user, host, port, path, query, hash) {
|
|
||||||
return {
|
|
||||||
scheme,
|
|
||||||
user,
|
|
||||||
host,
|
|
||||||
port,
|
|
||||||
path,
|
|
||||||
query,
|
|
||||||
hash,
|
|
||||||
type: 7 /* Absolute */,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
function parseUrl(input) {
|
|
||||||
if (isSchemeRelativeUrl(input)) {
|
|
||||||
const url = parseAbsoluteUrl('http:' + input);
|
|
||||||
url.scheme = '';
|
|
||||||
url.type = 6 /* SchemeRelative */;
|
|
||||||
return url;
|
|
||||||
}
|
|
||||||
if (isAbsolutePath(input)) {
|
|
||||||
const url = parseAbsoluteUrl('http://foo.com' + input);
|
|
||||||
url.scheme = '';
|
|
||||||
url.host = '';
|
|
||||||
url.type = 5 /* AbsolutePath */;
|
|
||||||
return url;
|
|
||||||
}
|
|
||||||
if (isFileUrl(input))
|
|
||||||
return parseFileUrl(input);
|
|
||||||
if (isAbsoluteUrl(input))
|
|
||||||
return parseAbsoluteUrl(input);
|
|
||||||
const url = parseAbsoluteUrl('http://foo.com/' + input);
|
|
||||||
url.scheme = '';
|
|
||||||
url.host = '';
|
|
||||||
url.type = input
|
|
||||||
? input.startsWith('?')
|
|
||||||
? 3 /* Query */
|
|
||||||
: input.startsWith('#')
|
|
||||||
? 2 /* Hash */
|
|
||||||
: 4 /* RelativePath */
|
|
||||||
: 1 /* Empty */;
|
|
||||||
return url;
|
|
||||||
}
|
|
||||||
function stripPathFilename(path) {
|
|
||||||
// If a path ends with a parent directory "..", then it's a relative path with excess parent
|
|
||||||
// paths. It's not a file, so we can't strip it.
|
|
||||||
if (path.endsWith('/..'))
|
|
||||||
return path;
|
|
||||||
const index = path.lastIndexOf('/');
|
|
||||||
return path.slice(0, index + 1);
|
|
||||||
}
|
|
||||||
function mergePaths(url, base) {
|
|
||||||
normalizePath(base, base.type);
|
|
||||||
// If the path is just a "/", then it was an empty path to begin with (remember, we're a relative
|
|
||||||
// path).
|
|
||||||
if (url.path === '/') {
|
|
||||||
url.path = base.path;
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
// Resolution happens relative to the base path's directory, not the file.
|
|
||||||
url.path = stripPathFilename(base.path) + url.path;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* The path can have empty directories "//", unneeded parents "foo/..", or current directory
|
|
||||||
* "foo/.". We need to normalize to a standard representation.
|
|
||||||
*/
|
|
||||||
function normalizePath(url, type) {
|
|
||||||
const rel = type <= 4 /* RelativePath */;
|
|
||||||
const pieces = url.path.split('/');
|
|
||||||
// We need to preserve the first piece always, so that we output a leading slash. The item at
|
|
||||||
// pieces[0] is an empty string.
|
|
||||||
let pointer = 1;
|
|
||||||
// Positive is the number of real directories we've output, used for popping a parent directory.
|
|
||||||
// Eg, "foo/bar/.." will have a positive 2, and we can decrement to be left with just "foo".
|
|
||||||
let positive = 0;
|
|
||||||
// We need to keep a trailing slash if we encounter an empty directory (eg, splitting "foo/" will
|
|
||||||
// generate `["foo", ""]` pieces). And, if we pop a parent directory. But once we encounter a
|
|
||||||
// real directory, we won't need to append, unless the other conditions happen again.
|
|
||||||
let addTrailingSlash = false;
|
|
||||||
for (let i = 1; i < pieces.length; i++) {
|
|
||||||
const piece = pieces[i];
|
|
||||||
// An empty directory, could be a trailing slash, or just a double "//" in the path.
|
|
||||||
if (!piece) {
|
|
||||||
addTrailingSlash = true;
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
// If we encounter a real directory, then we don't need to append anymore.
|
|
||||||
addTrailingSlash = false;
|
|
||||||
// A current directory, which we can always drop.
|
|
||||||
if (piece === '.')
|
|
||||||
continue;
|
|
||||||
// A parent directory, we need to see if there are any real directories we can pop. Else, we
|
|
||||||
// have an excess of parents, and we'll need to keep the "..".
|
|
||||||
if (piece === '..') {
|
|
||||||
if (positive) {
|
|
||||||
addTrailingSlash = true;
|
|
||||||
positive--;
|
|
||||||
pointer--;
|
|
||||||
}
|
|
||||||
else if (rel) {
|
|
||||||
// If we're in a relativePath, then we need to keep the excess parents. Else, in an absolute
|
|
||||||
// URL, protocol relative URL, or an absolute path, we don't need to keep excess.
|
|
||||||
pieces[pointer++] = piece;
|
|
||||||
}
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
// We've encountered a real directory. Move it to the next insertion pointer, which accounts for
|
|
||||||
// any popped or dropped directories.
|
|
||||||
pieces[pointer++] = piece;
|
|
||||||
positive++;
|
|
||||||
}
|
|
||||||
let path = '';
|
|
||||||
for (let i = 1; i < pointer; i++) {
|
|
||||||
path += '/' + pieces[i];
|
|
||||||
}
|
|
||||||
if (!path || (addTrailingSlash && !path.endsWith('/..'))) {
|
|
||||||
path += '/';
|
|
||||||
}
|
|
||||||
url.path = path;
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Attempts to resolve `input` URL/path relative to `base`.
|
|
||||||
*/
|
|
||||||
function resolve(input, base) {
|
|
||||||
if (!input && !base)
|
|
||||||
return '';
|
|
||||||
const url = parseUrl(input);
|
|
||||||
let inputType = url.type;
|
|
||||||
if (base && inputType !== 7 /* Absolute */) {
|
|
||||||
const baseUrl = parseUrl(base);
|
|
||||||
const baseType = baseUrl.type;
|
|
||||||
switch (inputType) {
|
|
||||||
case 1 /* Empty */:
|
|
||||||
url.hash = baseUrl.hash;
|
|
||||||
// fall through
|
|
||||||
case 2 /* Hash */:
|
|
||||||
url.query = baseUrl.query;
|
|
||||||
// fall through
|
|
||||||
case 3 /* Query */:
|
|
||||||
case 4 /* RelativePath */:
|
|
||||||
mergePaths(url, baseUrl);
|
|
||||||
// fall through
|
|
||||||
case 5 /* AbsolutePath */:
|
|
||||||
// The host, user, and port are joined, you can't copy one without the others.
|
|
||||||
url.user = baseUrl.user;
|
|
||||||
url.host = baseUrl.host;
|
|
||||||
url.port = baseUrl.port;
|
|
||||||
// fall through
|
|
||||||
case 6 /* SchemeRelative */:
|
|
||||||
// The input doesn't have a schema at least, so we need to copy at least that over.
|
|
||||||
url.scheme = baseUrl.scheme;
|
|
||||||
}
|
|
||||||
if (baseType > inputType)
|
|
||||||
inputType = baseType;
|
|
||||||
}
|
|
||||||
normalizePath(url, inputType);
|
|
||||||
const queryHash = url.query + url.hash;
|
|
||||||
switch (inputType) {
|
|
||||||
// This is impossible, because of the empty checks at the start of the function.
|
|
||||||
// case UrlType.Empty:
|
|
||||||
case 2 /* Hash */:
|
|
||||||
case 3 /* Query */:
|
|
||||||
return queryHash;
|
|
||||||
case 4 /* RelativePath */: {
|
|
||||||
// The first char is always a "/", and we need it to be relative.
|
|
||||||
const path = url.path.slice(1);
|
|
||||||
if (!path)
|
|
||||||
return queryHash || '.';
|
|
||||||
if (isRelative(base || input) && !isRelative(path)) {
|
|
||||||
// If base started with a leading ".", or there is no base and input started with a ".",
|
|
||||||
// then we need to ensure that the relative path starts with a ".". We don't know if
|
|
||||||
// relative starts with a "..", though, so check before prepending.
|
|
||||||
return './' + path + queryHash;
|
|
||||||
}
|
|
||||||
return path + queryHash;
|
|
||||||
}
|
|
||||||
case 5 /* AbsolutePath */:
|
|
||||||
return url.path + queryHash;
|
|
||||||
default:
|
|
||||||
return url.scheme + '//' + url.user + url.host + url.port + url.path + queryHash;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return resolve;
|
|
||||||
|
|
||||||
}));
|
|
||||||
//# sourceMappingURL=resolve-uri.umd.js.map
|
|
||||||
1
node_modules/@jridgewell/resolve-uri/dist/resolve-uri.umd.js.map
generated
vendored
1
node_modules/@jridgewell/resolve-uri/dist/resolve-uri.umd.js.map
generated
vendored
File diff suppressed because one or more lines are too long
4
node_modules/@jridgewell/resolve-uri/dist/types/resolve-uri.d.ts
generated
vendored
4
node_modules/@jridgewell/resolve-uri/dist/types/resolve-uri.d.ts
generated
vendored
@ -1,4 +0,0 @@
|
|||||||
/**
|
|
||||||
* Attempts to resolve `input` URL/path relative to `base`.
|
|
||||||
*/
|
|
||||||
export default function resolve(input: string, base: string | undefined): string;
|
|
||||||
69
node_modules/@jridgewell/resolve-uri/package.json
generated
vendored
69
node_modules/@jridgewell/resolve-uri/package.json
generated
vendored
@ -1,69 +0,0 @@
|
|||||||
{
|
|
||||||
"name": "@jridgewell/resolve-uri",
|
|
||||||
"version": "3.1.2",
|
|
||||||
"description": "Resolve a URI relative to an optional base URI",
|
|
||||||
"keywords": [
|
|
||||||
"resolve",
|
|
||||||
"uri",
|
|
||||||
"url",
|
|
||||||
"path"
|
|
||||||
],
|
|
||||||
"author": "Justin Ridgewell <justin@ridgewell.name>",
|
|
||||||
"license": "MIT",
|
|
||||||
"repository": "https://github.com/jridgewell/resolve-uri",
|
|
||||||
"main": "dist/resolve-uri.umd.js",
|
|
||||||
"module": "dist/resolve-uri.mjs",
|
|
||||||
"types": "dist/types/resolve-uri.d.ts",
|
|
||||||
"exports": {
|
|
||||||
".": [
|
|
||||||
{
|
|
||||||
"types": "./dist/types/resolve-uri.d.ts",
|
|
||||||
"browser": "./dist/resolve-uri.umd.js",
|
|
||||||
"require": "./dist/resolve-uri.umd.js",
|
|
||||||
"import": "./dist/resolve-uri.mjs"
|
|
||||||
},
|
|
||||||
"./dist/resolve-uri.umd.js"
|
|
||||||
],
|
|
||||||
"./package.json": "./package.json"
|
|
||||||
},
|
|
||||||
"files": [
|
|
||||||
"dist"
|
|
||||||
],
|
|
||||||
"engines": {
|
|
||||||
"node": ">=6.0.0"
|
|
||||||
},
|
|
||||||
"scripts": {
|
|
||||||
"prebuild": "rm -rf dist",
|
|
||||||
"build": "run-s -n build:*",
|
|
||||||
"build:rollup": "rollup -c rollup.config.js",
|
|
||||||
"build:ts": "tsc --project tsconfig.build.json",
|
|
||||||
"lint": "run-s -n lint:*",
|
|
||||||
"lint:prettier": "npm run test:lint:prettier -- --write",
|
|
||||||
"lint:ts": "npm run test:lint:ts -- --fix",
|
|
||||||
"pretest": "run-s build:rollup",
|
|
||||||
"test": "run-s -n test:lint test:only",
|
|
||||||
"test:debug": "mocha --inspect-brk",
|
|
||||||
"test:lint": "run-s -n test:lint:*",
|
|
||||||
"test:lint:prettier": "prettier --check '{src,test}/**/*.ts'",
|
|
||||||
"test:lint:ts": "eslint '{src,test}/**/*.ts'",
|
|
||||||
"test:only": "mocha",
|
|
||||||
"test:coverage": "c8 mocha",
|
|
||||||
"test:watch": "mocha --watch",
|
|
||||||
"prepublishOnly": "npm run preversion",
|
|
||||||
"preversion": "run-s test build"
|
|
||||||
},
|
|
||||||
"devDependencies": {
|
|
||||||
"@jridgewell/resolve-uri-latest": "npm:@jridgewell/resolve-uri@*",
|
|
||||||
"@rollup/plugin-typescript": "8.3.0",
|
|
||||||
"@typescript-eslint/eslint-plugin": "5.10.0",
|
|
||||||
"@typescript-eslint/parser": "5.10.0",
|
|
||||||
"c8": "7.11.0",
|
|
||||||
"eslint": "8.7.0",
|
|
||||||
"eslint-config-prettier": "8.3.0",
|
|
||||||
"mocha": "9.2.0",
|
|
||||||
"npm-run-all": "4.1.5",
|
|
||||||
"prettier": "2.5.1",
|
|
||||||
"rollup": "2.66.0",
|
|
||||||
"typescript": "4.5.5"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
19
node_modules/@jridgewell/set-array/LICENSE
generated
vendored
19
node_modules/@jridgewell/set-array/LICENSE
generated
vendored
@ -1,19 +0,0 @@
|
|||||||
Copyright 2022 Justin Ridgewell <jridgewell@google.com>
|
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
||||||
of this software and associated documentation files (the "Software"), to deal
|
|
||||||
in the Software without restriction, including without limitation the rights
|
|
||||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
||||||
copies of the Software, and to permit persons to whom the Software is
|
|
||||||
furnished to do so, subject to the following conditions:
|
|
||||||
|
|
||||||
The above copyright notice and this permission notice shall be included in
|
|
||||||
all copies or substantial portions of the Software.
|
|
||||||
|
|
||||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
||||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
||||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
||||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
||||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
||||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
||||||
SOFTWARE.
|
|
||||||
37
node_modules/@jridgewell/set-array/README.md
generated
vendored
37
node_modules/@jridgewell/set-array/README.md
generated
vendored
@ -1,37 +0,0 @@
|
|||||||
# @jridgewell/set-array
|
|
||||||
|
|
||||||
> Like a Set, but provides the index of the `key` in the backing array
|
|
||||||
|
|
||||||
This is designed to allow synchronizing a second array with the contents of the backing array, like
|
|
||||||
how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`, and there
|
|
||||||
are never duplicates.
|
|
||||||
|
|
||||||
## Installation
|
|
||||||
|
|
||||||
```sh
|
|
||||||
npm install @jridgewell/set-array
|
|
||||||
```
|
|
||||||
|
|
||||||
## Usage
|
|
||||||
|
|
||||||
```js
|
|
||||||
import { SetArray, get, put, pop } from '@jridgewell/set-array';
|
|
||||||
|
|
||||||
const sa = new SetArray();
|
|
||||||
|
|
||||||
let index = put(sa, 'first');
|
|
||||||
assert.strictEqual(index, 0);
|
|
||||||
|
|
||||||
index = put(sa, 'second');
|
|
||||||
assert.strictEqual(index, 1);
|
|
||||||
|
|
||||||
assert.deepEqual(sa.array, [ 'first', 'second' ]);
|
|
||||||
|
|
||||||
index = get(sa, 'first');
|
|
||||||
assert.strictEqual(index, 0);
|
|
||||||
|
|
||||||
pop(sa);
|
|
||||||
index = get(sa, 'second');
|
|
||||||
assert.strictEqual(index, undefined);
|
|
||||||
assert.deepEqual(sa.array, [ 'first' ]);
|
|
||||||
```
|
|
||||||
69
node_modules/@jridgewell/set-array/dist/set-array.mjs
generated
vendored
69
node_modules/@jridgewell/set-array/dist/set-array.mjs
generated
vendored
@ -1,69 +0,0 @@
|
|||||||
/**
|
|
||||||
* SetArray acts like a `Set` (allowing only one occurrence of a string `key`), but provides the
|
|
||||||
* index of the `key` in the backing array.
|
|
||||||
*
|
|
||||||
* This is designed to allow synchronizing a second array with the contents of the backing array,
|
|
||||||
* like how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`,
|
|
||||||
* and there are never duplicates.
|
|
||||||
*/
|
|
||||||
class SetArray {
|
|
||||||
constructor() {
|
|
||||||
this._indexes = { __proto__: null };
|
|
||||||
this.array = [];
|
|
||||||
}
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Typescript doesn't allow friend access to private fields, so this just casts the set into a type
|
|
||||||
* with public access modifiers.
|
|
||||||
*/
|
|
||||||
function cast(set) {
|
|
||||||
return set;
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Gets the index associated with `key` in the backing array, if it is already present.
|
|
||||||
*/
|
|
||||||
function get(setarr, key) {
|
|
||||||
return cast(setarr)._indexes[key];
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Puts `key` into the backing array, if it is not already present. Returns
|
|
||||||
* the index of the `key` in the backing array.
|
|
||||||
*/
|
|
||||||
function put(setarr, key) {
|
|
||||||
// The key may or may not be present. If it is present, it's a number.
|
|
||||||
const index = get(setarr, key);
|
|
||||||
if (index !== undefined)
|
|
||||||
return index;
|
|
||||||
const { array, _indexes: indexes } = cast(setarr);
|
|
||||||
const length = array.push(key);
|
|
||||||
return (indexes[key] = length - 1);
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Pops the last added item out of the SetArray.
|
|
||||||
*/
|
|
||||||
function pop(setarr) {
|
|
||||||
const { array, _indexes: indexes } = cast(setarr);
|
|
||||||
if (array.length === 0)
|
|
||||||
return;
|
|
||||||
const last = array.pop();
|
|
||||||
indexes[last] = undefined;
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Removes the key, if it exists in the set.
|
|
||||||
*/
|
|
||||||
function remove(setarr, key) {
|
|
||||||
const index = get(setarr, key);
|
|
||||||
if (index === undefined)
|
|
||||||
return;
|
|
||||||
const { array, _indexes: indexes } = cast(setarr);
|
|
||||||
for (let i = index + 1; i < array.length; i++) {
|
|
||||||
const k = array[i];
|
|
||||||
array[i - 1] = k;
|
|
||||||
indexes[k]--;
|
|
||||||
}
|
|
||||||
indexes[key] = undefined;
|
|
||||||
array.pop();
|
|
||||||
}
|
|
||||||
|
|
||||||
export { SetArray, get, pop, put, remove };
|
|
||||||
//# sourceMappingURL=set-array.mjs.map
|
|
||||||
1
node_modules/@jridgewell/set-array/dist/set-array.mjs.map
generated
vendored
1
node_modules/@jridgewell/set-array/dist/set-array.mjs.map
generated
vendored
@ -1 +0,0 @@
|
|||||||
{"version":3,"file":"set-array.mjs","sources":["../src/set-array.ts"],"sourcesContent":["type Key = string | number | symbol;\n\n/**\n * SetArray acts like a `Set` (allowing only one occurrence of a string `key`), but provides the\n * index of the `key` in the backing array.\n *\n * This is designed to allow synchronizing a second array with the contents of the backing array,\n * like how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`,\n * and there are never duplicates.\n */\nexport class SetArray<T extends Key = Key> {\n private declare _indexes: Record<T, number | undefined>;\n declare array: readonly T[];\n\n constructor() {\n this._indexes = { __proto__: null } as any;\n this.array = [];\n }\n}\n\ninterface PublicSet<T extends Key> {\n array: T[];\n _indexes: SetArray<T>['_indexes'];\n}\n\n/**\n * Typescript doesn't allow friend access to private fields, so this just casts the set into a type\n * with public access modifiers.\n */\nfunction cast<T extends Key>(set: SetArray<T>): PublicSet<T> {\n return set as any;\n}\n\n/**\n * Gets the index associated with `key` in the backing array, if it is already present.\n */\nexport function get<T extends Key>(setarr: SetArray<T>, key: T): number | undefined {\n return cast(setarr)._indexes[key];\n}\n\n/**\n * Puts `key` into the backing array, if it is not already present. Returns\n * the index of the `key` in the backing array.\n */\nexport function put<T extends Key>(setarr: SetArray<T>, key: T): number {\n // The key may or may not be present. 
If it is present, it's a number.\n const index = get(setarr, key);\n if (index !== undefined) return index;\n\n const { array, _indexes: indexes } = cast(setarr);\n\n const length = array.push(key);\n return (indexes[key] = length - 1);\n}\n\n/**\n * Pops the last added item out of the SetArray.\n */\nexport function pop<T extends Key>(setarr: SetArray<T>): void {\n const { array, _indexes: indexes } = cast(setarr);\n if (array.length === 0) return;\n\n const last = array.pop()!;\n indexes[last] = undefined;\n}\n\n/**\n * Removes the key, if it exists in the set.\n */\nexport function remove<T extends Key>(setarr: SetArray<T>, key: T): void {\n const index = get(setarr, key);\n if (index === undefined) return;\n\n const { array, _indexes: indexes } = cast(setarr);\n for (let i = index + 1; i < array.length; i++) {\n const k = array[i];\n array[i - 1] = k;\n indexes[k]!--;\n }\n indexes[key] = undefined;\n array.pop();\n}\n"],"names":[],"mappings":"AAEA;;;;;;;;MAQa,QAAQ;IAInB;QACE,IAAI,CAAC,QAAQ,GAAG,EAAE,SAAS,EAAE,IAAI,EAAS,CAAC;QAC3C,IAAI,CAAC,KAAK,GAAG,EAAE,CAAC;KACjB;CACF;AAOD;;;;AAIA,SAAS,IAAI,CAAgB,GAAgB;IAC3C,OAAO,GAAU,CAAC;AACpB,CAAC;AAED;;;SAGgB,GAAG,CAAgB,MAAmB,EAAE,GAAM;IAC5D,OAAO,IAAI,CAAC,MAAM,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC;AACpC,CAAC;AAED;;;;SAIgB,GAAG,CAAgB,MAAmB,EAAE,GAAM;;IAE5D,MAAM,KAAK,GAAG,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;IAC/B,IAAI,KAAK,KAAK,SAAS;QAAE,OAAO,KAAK,CAAC;IAEtC,MAAM,EAAE,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,GAAG,IAAI,CAAC,MAAM,CAAC,CAAC;IAElD,MAAM,MAAM,GAAG,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;IAC/B,QAAQ,OAAO,CAAC,GAAG,CAAC,GAAG,MAAM,GAAG,CAAC,EAAE;AACrC,CAAC;AAED;;;SAGgB,GAAG,CAAgB,MAAmB;IACpD,MAAM,EAAE,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,GAAG,IAAI,CAAC,MAAM,CAAC,CAAC;IAClD,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC;QAAE,OAAO;IAE/B,MAAM,IAAI,GAAG,KAAK,CAAC,GAAG,EAAG,CAAC;IAC1B,OAAO,CAAC,IAAI,CAAC,GAAG,SAAS,CAAC;AAC5B,CAAC;AAED;;;SAGgB,MAAM,CAAgB,MAAmB,EAAE,GAAM;IAC/D,MAAM,KAAK,GAAG,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;IAC/B,IAAI,KAAK,KAAK,SAAS;QAAE,OAAO;IAEh
C,MAAM,EAAE,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,GAAG,IAAI,CAAC,MAAM,CAAC,CAAC;IAClD,KAAK,IAAI,CAAC,GAAG,KAAK,GAAG,CAAC,EAAE,CAAC,GAAG,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;QAC7C,MAAM,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;QACnB,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC;QACjB,OAAO,CAAC,CAAC,CAAE,EAAE,CAAC;KACf;IACD,OAAO,CAAC,GAAG,CAAC,GAAG,SAAS,CAAC;IACzB,KAAK,CAAC,GAAG,EAAE,CAAC;AACd;;;;"}
|
|
||||||
83
node_modules/@jridgewell/set-array/dist/set-array.umd.js
generated
vendored
83
node_modules/@jridgewell/set-array/dist/set-array.umd.js
generated
vendored
@ -1,83 +0,0 @@
|
|||||||
(function (global, factory) {
|
|
||||||
typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) :
|
|
||||||
typeof define === 'function' && define.amd ? define(['exports'], factory) :
|
|
||||||
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.setArray = {}));
|
|
||||||
})(this, (function (exports) { 'use strict';
|
|
||||||
|
|
||||||
/**
|
|
||||||
* SetArray acts like a `Set` (allowing only one occurrence of a string `key`), but provides the
|
|
||||||
* index of the `key` in the backing array.
|
|
||||||
*
|
|
||||||
* This is designed to allow synchronizing a second array with the contents of the backing array,
|
|
||||||
* like how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`,
|
|
||||||
* and there are never duplicates.
|
|
||||||
*/
|
|
||||||
class SetArray {
|
|
||||||
constructor() {
|
|
||||||
this._indexes = { __proto__: null };
|
|
||||||
this.array = [];
|
|
||||||
}
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Typescript doesn't allow friend access to private fields, so this just casts the set into a type
|
|
||||||
* with public access modifiers.
|
|
||||||
*/
|
|
||||||
function cast(set) {
|
|
||||||
return set;
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Gets the index associated with `key` in the backing array, if it is already present.
|
|
||||||
*/
|
|
||||||
function get(setarr, key) {
|
|
||||||
return cast(setarr)._indexes[key];
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Puts `key` into the backing array, if it is not already present. Returns
|
|
||||||
* the index of the `key` in the backing array.
|
|
||||||
*/
|
|
||||||
function put(setarr, key) {
|
|
||||||
// The key may or may not be present. If it is present, it's a number.
|
|
||||||
const index = get(setarr, key);
|
|
||||||
if (index !== undefined)
|
|
||||||
return index;
|
|
||||||
const { array, _indexes: indexes } = cast(setarr);
|
|
||||||
const length = array.push(key);
|
|
||||||
return (indexes[key] = length - 1);
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Pops the last added item out of the SetArray.
|
|
||||||
*/
|
|
||||||
function pop(setarr) {
|
|
||||||
const { array, _indexes: indexes } = cast(setarr);
|
|
||||||
if (array.length === 0)
|
|
||||||
return;
|
|
||||||
const last = array.pop();
|
|
||||||
indexes[last] = undefined;
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Removes the key, if it exists in the set.
|
|
||||||
*/
|
|
||||||
function remove(setarr, key) {
|
|
||||||
const index = get(setarr, key);
|
|
||||||
if (index === undefined)
|
|
||||||
return;
|
|
||||||
const { array, _indexes: indexes } = cast(setarr);
|
|
||||||
for (let i = index + 1; i < array.length; i++) {
|
|
||||||
const k = array[i];
|
|
||||||
array[i - 1] = k;
|
|
||||||
indexes[k]--;
|
|
||||||
}
|
|
||||||
indexes[key] = undefined;
|
|
||||||
array.pop();
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.SetArray = SetArray;
|
|
||||||
exports.get = get;
|
|
||||||
exports.pop = pop;
|
|
||||||
exports.put = put;
|
|
||||||
exports.remove = remove;
|
|
||||||
|
|
||||||
Object.defineProperty(exports, '__esModule', { value: true });
|
|
||||||
|
|
||||||
}));
|
|
||||||
//# sourceMappingURL=set-array.umd.js.map
|
|
||||||
1
node_modules/@jridgewell/set-array/dist/set-array.umd.js.map
generated
vendored
1
node_modules/@jridgewell/set-array/dist/set-array.umd.js.map
generated
vendored
@ -1 +0,0 @@
|
|||||||
{"version":3,"file":"set-array.umd.js","sources":["../src/set-array.ts"],"sourcesContent":["type Key = string | number | symbol;\n\n/**\n * SetArray acts like a `Set` (allowing only one occurrence of a string `key`), but provides the\n * index of the `key` in the backing array.\n *\n * This is designed to allow synchronizing a second array with the contents of the backing array,\n * like how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`,\n * and there are never duplicates.\n */\nexport class SetArray<T extends Key = Key> {\n private declare _indexes: Record<T, number | undefined>;\n declare array: readonly T[];\n\n constructor() {\n this._indexes = { __proto__: null } as any;\n this.array = [];\n }\n}\n\ninterface PublicSet<T extends Key> {\n array: T[];\n _indexes: SetArray<T>['_indexes'];\n}\n\n/**\n * Typescript doesn't allow friend access to private fields, so this just casts the set into a type\n * with public access modifiers.\n */\nfunction cast<T extends Key>(set: SetArray<T>): PublicSet<T> {\n return set as any;\n}\n\n/**\n * Gets the index associated with `key` in the backing array, if it is already present.\n */\nexport function get<T extends Key>(setarr: SetArray<T>, key: T): number | undefined {\n return cast(setarr)._indexes[key];\n}\n\n/**\n * Puts `key` into the backing array, if it is not already present. Returns\n * the index of the `key` in the backing array.\n */\nexport function put<T extends Key>(setarr: SetArray<T>, key: T): number {\n // The key may or may not be present. 
If it is present, it's a number.\n const index = get(setarr, key);\n if (index !== undefined) return index;\n\n const { array, _indexes: indexes } = cast(setarr);\n\n const length = array.push(key);\n return (indexes[key] = length - 1);\n}\n\n/**\n * Pops the last added item out of the SetArray.\n */\nexport function pop<T extends Key>(setarr: SetArray<T>): void {\n const { array, _indexes: indexes } = cast(setarr);\n if (array.length === 0) return;\n\n const last = array.pop()!;\n indexes[last] = undefined;\n}\n\n/**\n * Removes the key, if it exists in the set.\n */\nexport function remove<T extends Key>(setarr: SetArray<T>, key: T): void {\n const index = get(setarr, key);\n if (index === undefined) return;\n\n const { array, _indexes: indexes } = cast(setarr);\n for (let i = index + 1; i < array.length; i++) {\n const k = array[i];\n array[i - 1] = k;\n indexes[k]!--;\n }\n indexes[key] = undefined;\n array.pop();\n}\n"],"names":[],"mappings":";;;;;;IAEA;;;;;;;;UAQa,QAAQ;QAInB;YACE,IAAI,CAAC,QAAQ,GAAG,EAAE,SAAS,EAAE,IAAI,EAAS,CAAC;YAC3C,IAAI,CAAC,KAAK,GAAG,EAAE,CAAC;SACjB;KACF;IAOD;;;;IAIA,SAAS,IAAI,CAAgB,GAAgB;QAC3C,OAAO,GAAU,CAAC;IACpB,CAAC;IAED;;;aAGgB,GAAG,CAAgB,MAAmB,EAAE,GAAM;QAC5D,OAAO,IAAI,CAAC,MAAM,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC;IACpC,CAAC;IAED;;;;aAIgB,GAAG,CAAgB,MAAmB,EAAE,GAAM;;QAE5D,MAAM,KAAK,GAAG,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;QAC/B,IAAI,KAAK,KAAK,SAAS;YAAE,OAAO,KAAK,CAAC;QAEtC,MAAM,EAAE,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,GAAG,IAAI,CAAC,MAAM,CAAC,CAAC;QAElD,MAAM,MAAM,GAAG,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;QAC/B,QAAQ,OAAO,CAAC,GAAG,CAAC,GAAG,MAAM,GAAG,CAAC,EAAE;IACrC,CAAC;IAED;;;aAGgB,GAAG,CAAgB,MAAmB;QACpD,MAAM,EAAE,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,GAAG,IAAI,CAAC,MAAM,CAAC,CAAC;QAClD,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC;YAAE,OAAO;QAE/B,MAAM,IAAI,GAAG,KAAK,CAAC,GAAG,EAAG,CAAC;QAC1B,OAAO,CAAC,IAAI,CAAC,GAAG,SAAS,CAAC;IAC5B,CAAC;IAED;;;aAGgB,MAAM,CAAgB,MAAmB,EAAE,GAAM;QAC/D,MAAM,KAAK,GAAG,GAAG,CAAC,MAAM,EAAE,GAAG,CAAC,CAAC;QAC/B,IAAI,KAAK,KAAK,SAAS;YAAE,OAA
O;QAEhC,MAAM,EAAE,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,GAAG,IAAI,CAAC,MAAM,CAAC,CAAC;QAClD,KAAK,IAAI,CAAC,GAAG,KAAK,GAAG,CAAC,EAAE,CAAC,GAAG,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YAC7C,MAAM,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;YACnB,KAAK,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC;YACjB,OAAO,CAAC,CAAC,CAAE,EAAE,CAAC;SACf;QACD,OAAO,CAAC,GAAG,CAAC,GAAG,SAAS,CAAC;QACzB,KAAK,CAAC,GAAG,EAAE,CAAC;IACd;;;;;;;;;;;;;;"}
|
|
||||||
32
node_modules/@jridgewell/set-array/dist/types/set-array.d.ts
generated
vendored
32
node_modules/@jridgewell/set-array/dist/types/set-array.d.ts
generated
vendored
@ -1,32 +0,0 @@
|
|||||||
declare type Key = string | number | symbol;
|
|
||||||
/**
|
|
||||||
* SetArray acts like a `Set` (allowing only one occurrence of a string `key`), but provides the
|
|
||||||
* index of the `key` in the backing array.
|
|
||||||
*
|
|
||||||
* This is designed to allow synchronizing a second array with the contents of the backing array,
|
|
||||||
* like how in a sourcemap `sourcesContent[i]` is the source content associated with `source[i]`,
|
|
||||||
* and there are never duplicates.
|
|
||||||
*/
|
|
||||||
export declare class SetArray<T extends Key = Key> {
|
|
||||||
private _indexes;
|
|
||||||
array: readonly T[];
|
|
||||||
constructor();
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Gets the index associated with `key` in the backing array, if it is already present.
|
|
||||||
*/
|
|
||||||
export declare function get<T extends Key>(setarr: SetArray<T>, key: T): number | undefined;
|
|
||||||
/**
|
|
||||||
* Puts `key` into the backing array, if it is not already present. Returns
|
|
||||||
* the index of the `key` in the backing array.
|
|
||||||
*/
|
|
||||||
export declare function put<T extends Key>(setarr: SetArray<T>, key: T): number;
|
|
||||||
/**
|
|
||||||
* Pops the last added item out of the SetArray.
|
|
||||||
*/
|
|
||||||
export declare function pop<T extends Key>(setarr: SetArray<T>): void;
|
|
||||||
/**
|
|
||||||
* Removes the key, if it exists in the set.
|
|
||||||
*/
|
|
||||||
export declare function remove<T extends Key>(setarr: SetArray<T>, key: T): void;
|
|
||||||
export {};
|
|
||||||
65
node_modules/@jridgewell/set-array/package.json
generated
vendored
65
node_modules/@jridgewell/set-array/package.json
generated
vendored
@ -1,65 +0,0 @@
|
|||||||
{
|
|
||||||
"name": "@jridgewell/set-array",
|
|
||||||
"version": "1.2.1",
|
|
||||||
"description": "Like a Set, but provides the index of the `key` in the backing array",
|
|
||||||
"keywords": [],
|
|
||||||
"author": "Justin Ridgewell <justin@ridgewell.name>",
|
|
||||||
"license": "MIT",
|
|
||||||
"repository": "https://github.com/jridgewell/set-array",
|
|
||||||
"main": "dist/set-array.umd.js",
|
|
||||||
"module": "dist/set-array.mjs",
|
|
||||||
"typings": "dist/types/set-array.d.ts",
|
|
||||||
"exports": {
|
|
||||||
".": [
|
|
||||||
{
|
|
||||||
"types": "./dist/types/set-array.d.ts",
|
|
||||||
"browser": "./dist/set-array.umd.js",
|
|
||||||
"require": "./dist/set-array.umd.js",
|
|
||||||
"import": "./dist/set-array.mjs"
|
|
||||||
},
|
|
||||||
"./dist/set-array.umd.js"
|
|
||||||
],
|
|
||||||
"./package.json": "./package.json"
|
|
||||||
},
|
|
||||||
"files": [
|
|
||||||
"dist"
|
|
||||||
],
|
|
||||||
"engines": {
|
|
||||||
"node": ">=6.0.0"
|
|
||||||
},
|
|
||||||
"scripts": {
|
|
||||||
"prebuild": "rm -rf dist",
|
|
||||||
"build": "run-s -n build:*",
|
|
||||||
"build:rollup": "rollup -c rollup.config.js",
|
|
||||||
"build:ts": "tsc --project tsconfig.build.json",
|
|
||||||
"lint": "run-s -n lint:*",
|
|
||||||
"lint:prettier": "npm run test:lint:prettier -- --write",
|
|
||||||
"lint:ts": "npm run test:lint:ts -- --fix",
|
|
||||||
"test": "run-s -n test:lint test:only",
|
|
||||||
"test:debug": "mocha --inspect-brk",
|
|
||||||
"test:lint": "run-s -n test:lint:*",
|
|
||||||
"test:lint:prettier": "prettier --check '{src,test}/**/*.ts'",
|
|
||||||
"test:lint:ts": "eslint '{src,test}/**/*.ts'",
|
|
||||||
"test:only": "mocha",
|
|
||||||
"test:coverage": "c8 mocha",
|
|
||||||
"test:watch": "mocha --watch",
|
|
||||||
"prepublishOnly": "npm run preversion",
|
|
||||||
"preversion": "run-s test build"
|
|
||||||
},
|
|
||||||
"devDependencies": {
|
|
||||||
"@rollup/plugin-typescript": "8.3.0",
|
|
||||||
"@types/mocha": "9.1.1",
|
|
||||||
"@types/node": "17.0.29",
|
|
||||||
"@typescript-eslint/eslint-plugin": "5.10.0",
|
|
||||||
"@typescript-eslint/parser": "5.10.0",
|
|
||||||
"c8": "7.11.0",
|
|
||||||
"eslint": "8.7.0",
|
|
||||||
"eslint-config-prettier": "8.3.0",
|
|
||||||
"mocha": "9.2.0",
|
|
||||||
"npm-run-all": "4.1.5",
|
|
||||||
"prettier": "2.5.1",
|
|
||||||
"rollup": "2.66.0",
|
|
||||||
"tsx": "4.7.1",
|
|
||||||
"typescript": "4.5.5"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
21
node_modules/@jridgewell/sourcemap-codec/LICENSE
generated
vendored
21
node_modules/@jridgewell/sourcemap-codec/LICENSE
generated
vendored
@ -1,21 +0,0 @@
|
|||||||
The MIT License
|
|
||||||
|
|
||||||
Copyright (c) 2015 Rich Harris
|
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
||||||
of this software and associated documentation files (the "Software"), to deal
|
|
||||||
in the Software without restriction, including without limitation the rights
|
|
||||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
||||||
copies of the Software, and to permit persons to whom the Software is
|
|
||||||
furnished to do so, subject to the following conditions:
|
|
||||||
|
|
||||||
The above copyright notice and this permission notice shall be included in
|
|
||||||
all copies or substantial portions of the Software.
|
|
||||||
|
|
||||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
||||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
||||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
||||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
||||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
||||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
|
||||||
THE SOFTWARE.
|
|
||||||
264
node_modules/@jridgewell/sourcemap-codec/README.md
generated
vendored
264
node_modules/@jridgewell/sourcemap-codec/README.md
generated
vendored
@ -1,264 +0,0 @@
|
|||||||
# @jridgewell/sourcemap-codec
|
|
||||||
|
|
||||||
Encode/decode the `mappings` property of a [sourcemap](https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit).
|
|
||||||
|
|
||||||
|
|
||||||
## Why?
|
|
||||||
|
|
||||||
Sourcemaps are difficult to generate and manipulate, because the `mappings` property – the part that actually links the generated code back to the original source – is encoded using an obscure method called [Variable-length quantity](https://en.wikipedia.org/wiki/Variable-length_quantity). On top of that, each segment in the mapping contains offsets rather than absolute indices, which means that you can't look at a segment in isolation – you have to understand the whole sourcemap.
|
|
||||||
|
|
||||||
This package makes the process slightly easier.
|
|
||||||
|
|
||||||
|
|
||||||
## Installation
|
|
||||||
|
|
||||||
```bash
|
|
||||||
npm install @jridgewell/sourcemap-codec
|
|
||||||
```
|
|
||||||
|
|
||||||
|
|
||||||
## Usage
|
|
||||||
|
|
||||||
```js
|
|
||||||
import { encode, decode } from '@jridgewell/sourcemap-codec';
|
|
||||||
|
|
||||||
var decoded = decode( ';EAEEA,EAAE,EAAC,CAAE;ECQY,UACC' );
|
|
||||||
|
|
||||||
assert.deepEqual( decoded, [
|
|
||||||
// the first line (of the generated code) has no mappings,
|
|
||||||
// as shown by the starting semi-colon (which separates lines)
|
|
||||||
[],
|
|
||||||
|
|
||||||
// the second line contains four (comma-separated) segments
|
|
||||||
[
|
|
||||||
// segments are encoded as you'd expect:
|
|
||||||
// [ generatedCodeColumn, sourceIndex, sourceCodeLine, sourceCodeColumn, nameIndex ]
|
|
||||||
|
|
||||||
// i.e. the first segment begins at column 2, and maps back to the second column
|
|
||||||
// of the second line (both zero-based) of the 0th source, and uses the 0th
|
|
||||||
// name in the `map.names` array
|
|
||||||
[ 2, 0, 2, 2, 0 ],
|
|
||||||
|
|
||||||
// the remaining segments are 4-length rather than 5-length,
|
|
||||||
// because they don't map a name
|
|
||||||
[ 4, 0, 2, 4 ],
|
|
||||||
[ 6, 0, 2, 5 ],
|
|
||||||
[ 7, 0, 2, 7 ]
|
|
||||||
],
|
|
||||||
|
|
||||||
// the final line contains two segments
|
|
||||||
[
|
|
||||||
[ 2, 1, 10, 19 ],
|
|
||||||
[ 12, 1, 11, 20 ]
|
|
||||||
]
|
|
||||||
]);
|
|
||||||
|
|
||||||
var encoded = encode( decoded );
|
|
||||||
assert.equal( encoded, ';EAEEA,EAAE,EAAC,CAAE;ECQY,UACC' );
|
|
||||||
```
|
|
||||||
|
|
||||||
## Benchmarks
|
|
||||||
|
|
||||||
```
|
|
||||||
node v20.10.0
|
|
||||||
|
|
||||||
amp.js.map - 45120 segments
|
|
||||||
|
|
||||||
Decode Memory Usage:
|
|
||||||
local code 5815135 bytes
|
|
||||||
@jridgewell/sourcemap-codec 1.4.15 5868160 bytes
|
|
||||||
sourcemap-codec 5492584 bytes
|
|
||||||
source-map-0.6.1 13569984 bytes
|
|
||||||
source-map-0.8.0 6390584 bytes
|
|
||||||
chrome dev tools 8011136 bytes
|
|
||||||
Smallest memory usage is sourcemap-codec
|
|
||||||
|
|
||||||
Decode speed:
|
|
||||||
decode: local code x 492 ops/sec ±1.22% (90 runs sampled)
|
|
||||||
decode: @jridgewell/sourcemap-codec 1.4.15 x 499 ops/sec ±1.16% (89 runs sampled)
|
|
||||||
decode: sourcemap-codec x 376 ops/sec ±1.66% (89 runs sampled)
|
|
||||||
decode: source-map-0.6.1 x 34.99 ops/sec ±0.94% (48 runs sampled)
|
|
||||||
decode: source-map-0.8.0 x 351 ops/sec ±0.07% (95 runs sampled)
|
|
||||||
chrome dev tools x 165 ops/sec ±0.91% (86 runs sampled)
|
|
||||||
Fastest is decode: @jridgewell/sourcemap-codec 1.4.15
|
|
||||||
|
|
||||||
Encode Memory Usage:
|
|
||||||
local code 444248 bytes
|
|
||||||
@jridgewell/sourcemap-codec 1.4.15 623024 bytes
|
|
||||||
sourcemap-codec 8696280 bytes
|
|
||||||
source-map-0.6.1 8745176 bytes
|
|
||||||
source-map-0.8.0 8736624 bytes
|
|
||||||
Smallest memory usage is local code
|
|
||||||
|
|
||||||
Encode speed:
|
|
||||||
encode: local code x 796 ops/sec ±0.11% (97 runs sampled)
|
|
||||||
encode: @jridgewell/sourcemap-codec 1.4.15 x 795 ops/sec ±0.25% (98 runs sampled)
|
|
||||||
encode: sourcemap-codec x 231 ops/sec ±0.83% (86 runs sampled)
|
|
||||||
encode: source-map-0.6.1 x 166 ops/sec ±0.57% (86 runs sampled)
|
|
||||||
encode: source-map-0.8.0 x 203 ops/sec ±0.45% (88 runs sampled)
|
|
||||||
Fastest is encode: local code,encode: @jridgewell/sourcemap-codec 1.4.15
|
|
||||||
|
|
||||||
|
|
||||||
***
|
|
||||||
|
|
||||||
|
|
||||||
babel.min.js.map - 347793 segments
|
|
||||||
|
|
||||||
Decode Memory Usage:
|
|
||||||
local code 35424960 bytes
|
|
||||||
@jridgewell/sourcemap-codec 1.4.15 35424696 bytes
|
|
||||||
sourcemap-codec 36033464 bytes
|
|
||||||
source-map-0.6.1 62253704 bytes
|
|
||||||
source-map-0.8.0 43843920 bytes
|
|
||||||
chrome dev tools 45111400 bytes
|
|
||||||
Smallest memory usage is @jridgewell/sourcemap-codec 1.4.15
|
|
||||||
|
|
||||||
Decode speed:
|
|
||||||
decode: local code x 38.18 ops/sec ±5.44% (52 runs sampled)
|
|
||||||
decode: @jridgewell/sourcemap-codec 1.4.15 x 38.36 ops/sec ±5.02% (52 runs sampled)
|
|
||||||
decode: sourcemap-codec x 34.05 ops/sec ±4.45% (47 runs sampled)
|
|
||||||
decode: source-map-0.6.1 x 4.31 ops/sec ±2.76% (15 runs sampled)
|
|
||||||
decode: source-map-0.8.0 x 55.60 ops/sec ±0.13% (73 runs sampled)
|
|
||||||
chrome dev tools x 16.94 ops/sec ±3.78% (46 runs sampled)
|
|
||||||
Fastest is decode: source-map-0.8.0
|
|
||||||
|
|
||||||
Encode Memory Usage:
|
|
||||||
local code 2606016 bytes
|
|
||||||
@jridgewell/sourcemap-codec 1.4.15 2626440 bytes
|
|
||||||
sourcemap-codec 21152576 bytes
|
|
||||||
source-map-0.6.1 25023928 bytes
|
|
||||||
source-map-0.8.0 25256448 bytes
|
|
||||||
Smallest memory usage is local code
|
|
||||||
|
|
||||||
Encode speed:
|
|
||||||
encode: local code x 127 ops/sec ±0.18% (83 runs sampled)
|
|
||||||
encode: @jridgewell/sourcemap-codec 1.4.15 x 128 ops/sec ±0.26% (83 runs sampled)
|
|
||||||
encode: sourcemap-codec x 29.31 ops/sec ±2.55% (53 runs sampled)
|
|
||||||
encode: source-map-0.6.1 x 18.85 ops/sec ±3.19% (36 runs sampled)
|
|
||||||
encode: source-map-0.8.0 x 19.34 ops/sec ±1.97% (36 runs sampled)
|
|
||||||
Fastest is encode: @jridgewell/sourcemap-codec 1.4.15
|
|
||||||
|
|
||||||
|
|
||||||
***
|
|
||||||
|
|
||||||
|
|
||||||
preact.js.map - 1992 segments
|
|
||||||
|
|
||||||
Decode Memory Usage:
|
|
||||||
local code 261696 bytes
|
|
||||||
@jridgewell/sourcemap-codec 1.4.15 244296 bytes
|
|
||||||
sourcemap-codec 302816 bytes
|
|
||||||
source-map-0.6.1 939176 bytes
|
|
||||||
source-map-0.8.0 336 bytes
|
|
||||||
chrome dev tools 587368 bytes
|
|
||||||
Smallest memory usage is source-map-0.8.0
|
|
||||||
|
|
||||||
Decode speed:
|
|
||||||
decode: local code x 17,782 ops/sec ±0.32% (97 runs sampled)
|
|
||||||
decode: @jridgewell/sourcemap-codec 1.4.15 x 17,863 ops/sec ±0.40% (100 runs sampled)
|
|
||||||
decode: sourcemap-codec x 12,453 ops/sec ±0.27% (101 runs sampled)
|
|
||||||
decode: source-map-0.6.1 x 1,288 ops/sec ±1.05% (96 runs sampled)
|
|
||||||
decode: source-map-0.8.0 x 9,289 ops/sec ±0.27% (101 runs sampled)
|
|
||||||
chrome dev tools x 4,769 ops/sec ±0.18% (100 runs sampled)
|
|
||||||
Fastest is decode: @jridgewell/sourcemap-codec 1.4.15
|
|
||||||
|
|
||||||
Encode Memory Usage:
|
|
||||||
local code 262944 bytes
|
|
||||||
@jridgewell/sourcemap-codec 1.4.15 25544 bytes
|
|
||||||
sourcemap-codec 323048 bytes
|
|
||||||
source-map-0.6.1 507808 bytes
|
|
||||||
source-map-0.8.0 507480 bytes
|
|
||||||
Smallest memory usage is @jridgewell/sourcemap-codec 1.4.15
|
|
||||||
|
|
||||||
Encode speed:
|
|
||||||
encode: local code x 24,207 ops/sec ±0.79% (95 runs sampled)
|
|
||||||
encode: @jridgewell/sourcemap-codec 1.4.15 x 24,288 ops/sec ±0.48% (96 runs sampled)
|
|
||||||
encode: sourcemap-codec x 6,761 ops/sec ±0.21% (100 runs sampled)
|
|
||||||
encode: source-map-0.6.1 x 5,374 ops/sec ±0.17% (99 runs sampled)
|
|
||||||
encode: source-map-0.8.0 x 5,633 ops/sec ±0.32% (99 runs sampled)
|
|
||||||
Fastest is encode: @jridgewell/sourcemap-codec 1.4.15,encode: local code
|
|
||||||
|
|
||||||
|
|
||||||
***
|
|
||||||
|
|
||||||
|
|
||||||
react.js.map - 5726 segments
|
|
||||||
|
|
||||||
Decode Memory Usage:
|
|
||||||
local code 678816 bytes
|
|
||||||
@jridgewell/sourcemap-codec 1.4.15 678816 bytes
|
|
||||||
sourcemap-codec 816400 bytes
|
|
||||||
source-map-0.6.1 2288864 bytes
|
|
||||||
source-map-0.8.0 721360 bytes
|
|
||||||
chrome dev tools 1012512 bytes
|
|
||||||
Smallest memory usage is local code
|
|
||||||
|
|
||||||
Decode speed:
|
|
||||||
decode: local code x 6,178 ops/sec ±0.19% (98 runs sampled)
|
|
||||||
decode: @jridgewell/sourcemap-codec 1.4.15 x 6,261 ops/sec ±0.22% (100 runs sampled)
|
|
||||||
decode: sourcemap-codec x 4,472 ops/sec ±0.90% (99 runs sampled)
|
|
||||||
decode: source-map-0.6.1 x 449 ops/sec ±0.31% (95 runs sampled)
|
|
||||||
decode: source-map-0.8.0 x 3,219 ops/sec ±0.13% (100 runs sampled)
|
|
||||||
chrome dev tools x 1,743 ops/sec ±0.20% (99 runs sampled)
|
|
||||||
Fastest is decode: @jridgewell/sourcemap-codec 1.4.15
|
|
||||||
|
|
||||||
Encode Memory Usage:
|
|
||||||
local code 140960 bytes
|
|
||||||
@jridgewell/sourcemap-codec 1.4.15 159808 bytes
|
|
||||||
sourcemap-codec 969304 bytes
|
|
||||||
source-map-0.6.1 930520 bytes
|
|
||||||
source-map-0.8.0 930248 bytes
|
|
||||||
Smallest memory usage is local code
|
|
||||||
|
|
||||||
Encode speed:
|
|
||||||
encode: local code x 8,013 ops/sec ±0.19% (100 runs sampled)
|
|
||||||
encode: @jridgewell/sourcemap-codec 1.4.15 x 7,989 ops/sec ±0.20% (101 runs sampled)
|
|
||||||
encode: sourcemap-codec x 2,472 ops/sec ±0.21% (99 runs sampled)
|
|
||||||
encode: source-map-0.6.1 x 2,200 ops/sec ±0.17% (99 runs sampled)
|
|
||||||
encode: source-map-0.8.0 x 2,220 ops/sec ±0.37% (99 runs sampled)
|
|
||||||
Fastest is encode: local code
|
|
||||||
|
|
||||||
|
|
||||||
***
|
|
||||||
|
|
||||||
|
|
||||||
vscode.map - 2141001 segments
|
|
||||||
|
|
||||||
Decode Memory Usage:
|
|
||||||
local code 198955264 bytes
|
|
||||||
@jridgewell/sourcemap-codec 1.4.15 199175352 bytes
|
|
||||||
sourcemap-codec 199102688 bytes
|
|
||||||
source-map-0.6.1 386323432 bytes
|
|
||||||
source-map-0.8.0 244116432 bytes
|
|
||||||
chrome dev tools 293734280 bytes
|
|
||||||
Smallest memory usage is local code
|
|
||||||
|
|
||||||
Decode speed:
|
|
||||||
decode: local code x 3.90 ops/sec ±22.21% (15 runs sampled)
|
|
||||||
decode: @jridgewell/sourcemap-codec 1.4.15 x 3.95 ops/sec ±23.53% (15 runs sampled)
|
|
||||||
decode: sourcemap-codec x 3.82 ops/sec ±17.94% (14 runs sampled)
|
|
||||||
decode: source-map-0.6.1 x 0.61 ops/sec ±7.81% (6 runs sampled)
|
|
||||||
decode: source-map-0.8.0 x 9.54 ops/sec ±0.28% (28 runs sampled)
|
|
||||||
chrome dev tools x 2.18 ops/sec ±10.58% (10 runs sampled)
|
|
||||||
Fastest is decode: source-map-0.8.0
|
|
||||||
|
|
||||||
Encode Memory Usage:
|
|
||||||
local code 13509880 bytes
|
|
||||||
@jridgewell/sourcemap-codec 1.4.15 13537648 bytes
|
|
||||||
sourcemap-codec 32540104 bytes
|
|
||||||
source-map-0.6.1 127531040 bytes
|
|
||||||
source-map-0.8.0 127535312 bytes
|
|
||||||
Smallest memory usage is local code
|
|
||||||
|
|
||||||
Encode speed:
|
|
||||||
encode: local code x 20.10 ops/sec ±0.19% (38 runs sampled)
|
|
||||||
encode: @jridgewell/sourcemap-codec 1.4.15 x 20.26 ops/sec ±0.32% (38 runs sampled)
|
|
||||||
encode: sourcemap-codec x 5.44 ops/sec ±1.64% (18 runs sampled)
|
|
||||||
encode: source-map-0.6.1 x 2.30 ops/sec ±4.79% (10 runs sampled)
|
|
||||||
encode: source-map-0.8.0 x 2.46 ops/sec ±6.53% (10 runs sampled)
|
|
||||||
Fastest is encode: @jridgewell/sourcemap-codec 1.4.15
|
|
||||||
```
|
|
||||||
|
|
||||||
# License
|
|
||||||
|
|
||||||
MIT
|
|
||||||
424
node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.mjs
generated
vendored
424
node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.mjs
generated
vendored
@ -1,424 +0,0 @@
|
|||||||
const comma = ','.charCodeAt(0);
|
|
||||||
const semicolon = ';'.charCodeAt(0);
|
|
||||||
const chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/';
|
|
||||||
const intToChar = new Uint8Array(64); // 64 possible chars.
|
|
||||||
const charToInt = new Uint8Array(128); // z is 122 in ASCII
|
|
||||||
for (let i = 0; i < chars.length; i++) {
|
|
||||||
const c = chars.charCodeAt(i);
|
|
||||||
intToChar[i] = c;
|
|
||||||
charToInt[c] = i;
|
|
||||||
}
|
|
||||||
function decodeInteger(reader, relative) {
|
|
||||||
let value = 0;
|
|
||||||
let shift = 0;
|
|
||||||
let integer = 0;
|
|
||||||
do {
|
|
||||||
const c = reader.next();
|
|
||||||
integer = charToInt[c];
|
|
||||||
value |= (integer & 31) << shift;
|
|
||||||
shift += 5;
|
|
||||||
} while (integer & 32);
|
|
||||||
const shouldNegate = value & 1;
|
|
||||||
value >>>= 1;
|
|
||||||
if (shouldNegate) {
|
|
||||||
value = -0x80000000 | -value;
|
|
||||||
}
|
|
||||||
return relative + value;
|
|
||||||
}
|
|
||||||
function encodeInteger(builder, num, relative) {
|
|
||||||
let delta = num - relative;
|
|
||||||
delta = delta < 0 ? (-delta << 1) | 1 : delta << 1;
|
|
||||||
do {
|
|
||||||
let clamped = delta & 0b011111;
|
|
||||||
delta >>>= 5;
|
|
||||||
if (delta > 0)
|
|
||||||
clamped |= 0b100000;
|
|
||||||
builder.write(intToChar[clamped]);
|
|
||||||
} while (delta > 0);
|
|
||||||
return num;
|
|
||||||
}
|
|
||||||
function hasMoreVlq(reader, max) {
|
|
||||||
if (reader.pos >= max)
|
|
||||||
return false;
|
|
||||||
return reader.peek() !== comma;
|
|
||||||
}
|
|
||||||
|
|
||||||
const bufLength = 1024 * 16;
|
|
||||||
// Provide a fallback for older environments.
|
|
||||||
const td = typeof TextDecoder !== 'undefined'
|
|
||||||
? /* #__PURE__ */ new TextDecoder()
|
|
||||||
: typeof Buffer !== 'undefined'
|
|
||||||
? {
|
|
||||||
decode(buf) {
|
|
||||||
const out = Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength);
|
|
||||||
return out.toString();
|
|
||||||
},
|
|
||||||
}
|
|
||||||
: {
|
|
||||||
decode(buf) {
|
|
||||||
let out = '';
|
|
||||||
for (let i = 0; i < buf.length; i++) {
|
|
||||||
out += String.fromCharCode(buf[i]);
|
|
||||||
}
|
|
||||||
return out;
|
|
||||||
},
|
|
||||||
};
|
|
||||||
class StringWriter {
|
|
||||||
constructor() {
|
|
||||||
this.pos = 0;
|
|
||||||
this.out = '';
|
|
||||||
this.buffer = new Uint8Array(bufLength);
|
|
||||||
}
|
|
||||||
write(v) {
|
|
||||||
const { buffer } = this;
|
|
||||||
buffer[this.pos++] = v;
|
|
||||||
if (this.pos === bufLength) {
|
|
||||||
this.out += td.decode(buffer);
|
|
||||||
this.pos = 0;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
flush() {
|
|
||||||
const { buffer, out, pos } = this;
|
|
||||||
return pos > 0 ? out + td.decode(buffer.subarray(0, pos)) : out;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
class StringReader {
|
|
||||||
constructor(buffer) {
|
|
||||||
this.pos = 0;
|
|
||||||
this.buffer = buffer;
|
|
||||||
}
|
|
||||||
next() {
|
|
||||||
return this.buffer.charCodeAt(this.pos++);
|
|
||||||
}
|
|
||||||
peek() {
|
|
||||||
return this.buffer.charCodeAt(this.pos);
|
|
||||||
}
|
|
||||||
indexOf(char) {
|
|
||||||
const { buffer, pos } = this;
|
|
||||||
const idx = buffer.indexOf(char, pos);
|
|
||||||
return idx === -1 ? buffer.length : idx;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const EMPTY = [];
|
|
||||||
function decodeOriginalScopes(input) {
|
|
||||||
const { length } = input;
|
|
||||||
const reader = new StringReader(input);
|
|
||||||
const scopes = [];
|
|
||||||
const stack = [];
|
|
||||||
let line = 0;
|
|
||||||
for (; reader.pos < length; reader.pos++) {
|
|
||||||
line = decodeInteger(reader, line);
|
|
||||||
const column = decodeInteger(reader, 0);
|
|
||||||
if (!hasMoreVlq(reader, length)) {
|
|
||||||
const last = stack.pop();
|
|
||||||
last[2] = line;
|
|
||||||
last[3] = column;
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
const kind = decodeInteger(reader, 0);
|
|
||||||
const fields = decodeInteger(reader, 0);
|
|
||||||
const hasName = fields & 0b0001;
|
|
||||||
const scope = (hasName ? [line, column, 0, 0, kind, decodeInteger(reader, 0)] : [line, column, 0, 0, kind]);
|
|
||||||
let vars = EMPTY;
|
|
||||||
if (hasMoreVlq(reader, length)) {
|
|
||||||
vars = [];
|
|
||||||
do {
|
|
||||||
const varsIndex = decodeInteger(reader, 0);
|
|
||||||
vars.push(varsIndex);
|
|
||||||
} while (hasMoreVlq(reader, length));
|
|
||||||
}
|
|
||||||
scope.vars = vars;
|
|
||||||
scopes.push(scope);
|
|
||||||
stack.push(scope);
|
|
||||||
}
|
|
||||||
return scopes;
|
|
||||||
}
|
|
||||||
function encodeOriginalScopes(scopes) {
|
|
||||||
const writer = new StringWriter();
|
|
||||||
for (let i = 0; i < scopes.length;) {
|
|
||||||
i = _encodeOriginalScopes(scopes, i, writer, [0]);
|
|
||||||
}
|
|
||||||
return writer.flush();
|
|
||||||
}
|
|
||||||
function _encodeOriginalScopes(scopes, index, writer, state) {
|
|
||||||
const scope = scopes[index];
|
|
||||||
const { 0: startLine, 1: startColumn, 2: endLine, 3: endColumn, 4: kind, vars } = scope;
|
|
||||||
if (index > 0)
|
|
||||||
writer.write(comma);
|
|
||||||
state[0] = encodeInteger(writer, startLine, state[0]);
|
|
||||||
encodeInteger(writer, startColumn, 0);
|
|
||||||
encodeInteger(writer, kind, 0);
|
|
||||||
const fields = scope.length === 6 ? 0b0001 : 0;
|
|
||||||
encodeInteger(writer, fields, 0);
|
|
||||||
if (scope.length === 6)
|
|
||||||
encodeInteger(writer, scope[5], 0);
|
|
||||||
for (const v of vars) {
|
|
||||||
encodeInteger(writer, v, 0);
|
|
||||||
}
|
|
||||||
for (index++; index < scopes.length;) {
|
|
||||||
const next = scopes[index];
|
|
||||||
const { 0: l, 1: c } = next;
|
|
||||||
if (l > endLine || (l === endLine && c >= endColumn)) {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
index = _encodeOriginalScopes(scopes, index, writer, state);
|
|
||||||
}
|
|
||||||
writer.write(comma);
|
|
||||||
state[0] = encodeInteger(writer, endLine, state[0]);
|
|
||||||
encodeInteger(writer, endColumn, 0);
|
|
||||||
return index;
|
|
||||||
}
|
|
||||||
function decodeGeneratedRanges(input) {
|
|
||||||
const { length } = input;
|
|
||||||
const reader = new StringReader(input);
|
|
||||||
const ranges = [];
|
|
||||||
const stack = [];
|
|
||||||
let genLine = 0;
|
|
||||||
let definitionSourcesIndex = 0;
|
|
||||||
let definitionScopeIndex = 0;
|
|
||||||
let callsiteSourcesIndex = 0;
|
|
||||||
let callsiteLine = 0;
|
|
||||||
let callsiteColumn = 0;
|
|
||||||
let bindingLine = 0;
|
|
||||||
let bindingColumn = 0;
|
|
||||||
do {
|
|
||||||
const semi = reader.indexOf(';');
|
|
||||||
let genColumn = 0;
|
|
||||||
for (; reader.pos < semi; reader.pos++) {
|
|
||||||
genColumn = decodeInteger(reader, genColumn);
|
|
||||||
if (!hasMoreVlq(reader, semi)) {
|
|
||||||
const last = stack.pop();
|
|
||||||
last[2] = genLine;
|
|
||||||
last[3] = genColumn;
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
const fields = decodeInteger(reader, 0);
|
|
||||||
const hasDefinition = fields & 0b0001;
|
|
||||||
const hasCallsite = fields & 0b0010;
|
|
||||||
const hasScope = fields & 0b0100;
|
|
||||||
let callsite = null;
|
|
||||||
let bindings = EMPTY;
|
|
||||||
let range;
|
|
||||||
if (hasDefinition) {
|
|
||||||
const defSourcesIndex = decodeInteger(reader, definitionSourcesIndex);
|
|
||||||
definitionScopeIndex = decodeInteger(reader, definitionSourcesIndex === defSourcesIndex ? definitionScopeIndex : 0);
|
|
||||||
definitionSourcesIndex = defSourcesIndex;
|
|
||||||
range = [genLine, genColumn, 0, 0, defSourcesIndex, definitionScopeIndex];
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
range = [genLine, genColumn, 0, 0];
|
|
||||||
}
|
|
||||||
range.isScope = !!hasScope;
|
|
||||||
if (hasCallsite) {
|
|
||||||
const prevCsi = callsiteSourcesIndex;
|
|
||||||
const prevLine = callsiteLine;
|
|
||||||
callsiteSourcesIndex = decodeInteger(reader, callsiteSourcesIndex);
|
|
||||||
const sameSource = prevCsi === callsiteSourcesIndex;
|
|
||||||
callsiteLine = decodeInteger(reader, sameSource ? callsiteLine : 0);
|
|
||||||
callsiteColumn = decodeInteger(reader, sameSource && prevLine === callsiteLine ? callsiteColumn : 0);
|
|
||||||
callsite = [callsiteSourcesIndex, callsiteLine, callsiteColumn];
|
|
||||||
}
|
|
||||||
range.callsite = callsite;
|
|
||||||
if (hasMoreVlq(reader, semi)) {
|
|
||||||
bindings = [];
|
|
||||||
do {
|
|
||||||
bindingLine = genLine;
|
|
||||||
bindingColumn = genColumn;
|
|
||||||
const expressionsCount = decodeInteger(reader, 0);
|
|
||||||
let expressionRanges;
|
|
||||||
if (expressionsCount < -1) {
|
|
||||||
expressionRanges = [[decodeInteger(reader, 0)]];
|
|
||||||
for (let i = -1; i > expressionsCount; i--) {
|
|
||||||
const prevBl = bindingLine;
|
|
||||||
bindingLine = decodeInteger(reader, bindingLine);
|
|
||||||
bindingColumn = decodeInteger(reader, bindingLine === prevBl ? bindingColumn : 0);
|
|
||||||
const expression = decodeInteger(reader, 0);
|
|
||||||
expressionRanges.push([expression, bindingLine, bindingColumn]);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
expressionRanges = [[expressionsCount]];
|
|
||||||
}
|
|
||||||
bindings.push(expressionRanges);
|
|
||||||
} while (hasMoreVlq(reader, semi));
|
|
||||||
}
|
|
||||||
range.bindings = bindings;
|
|
||||||
ranges.push(range);
|
|
||||||
stack.push(range);
|
|
||||||
}
|
|
||||||
genLine++;
|
|
||||||
reader.pos = semi + 1;
|
|
||||||
} while (reader.pos < length);
|
|
||||||
return ranges;
|
|
||||||
}
|
|
||||||
function encodeGeneratedRanges(ranges) {
|
|
||||||
if (ranges.length === 0)
|
|
||||||
return '';
|
|
||||||
const writer = new StringWriter();
|
|
||||||
for (let i = 0; i < ranges.length;) {
|
|
||||||
i = _encodeGeneratedRanges(ranges, i, writer, [0, 0, 0, 0, 0, 0, 0]);
|
|
||||||
}
|
|
||||||
return writer.flush();
|
|
||||||
}
|
|
||||||
function _encodeGeneratedRanges(ranges, index, writer, state) {
|
|
||||||
const range = ranges[index];
|
|
||||||
const { 0: startLine, 1: startColumn, 2: endLine, 3: endColumn, isScope, callsite, bindings, } = range;
|
|
||||||
if (state[0] < startLine) {
|
|
||||||
catchupLine(writer, state[0], startLine);
|
|
||||||
state[0] = startLine;
|
|
||||||
state[1] = 0;
|
|
||||||
}
|
|
||||||
else if (index > 0) {
|
|
||||||
writer.write(comma);
|
|
||||||
}
|
|
||||||
state[1] = encodeInteger(writer, range[1], state[1]);
|
|
||||||
const fields = (range.length === 6 ? 0b0001 : 0) | (callsite ? 0b0010 : 0) | (isScope ? 0b0100 : 0);
|
|
||||||
encodeInteger(writer, fields, 0);
|
|
||||||
if (range.length === 6) {
|
|
||||||
const { 4: sourcesIndex, 5: scopesIndex } = range;
|
|
||||||
if (sourcesIndex !== state[2]) {
|
|
||||||
state[3] = 0;
|
|
||||||
}
|
|
||||||
state[2] = encodeInteger(writer, sourcesIndex, state[2]);
|
|
||||||
state[3] = encodeInteger(writer, scopesIndex, state[3]);
|
|
||||||
}
|
|
||||||
if (callsite) {
|
|
||||||
const { 0: sourcesIndex, 1: callLine, 2: callColumn } = range.callsite;
|
|
||||||
if (sourcesIndex !== state[4]) {
|
|
||||||
state[5] = 0;
|
|
||||||
state[6] = 0;
|
|
||||||
}
|
|
||||||
else if (callLine !== state[5]) {
|
|
||||||
state[6] = 0;
|
|
||||||
}
|
|
||||||
state[4] = encodeInteger(writer, sourcesIndex, state[4]);
|
|
||||||
state[5] = encodeInteger(writer, callLine, state[5]);
|
|
||||||
state[6] = encodeInteger(writer, callColumn, state[6]);
|
|
||||||
}
|
|
||||||
if (bindings) {
|
|
||||||
for (const binding of bindings) {
|
|
||||||
if (binding.length > 1)
|
|
||||||
encodeInteger(writer, -binding.length, 0);
|
|
||||||
const expression = binding[0][0];
|
|
||||||
encodeInteger(writer, expression, 0);
|
|
||||||
let bindingStartLine = startLine;
|
|
||||||
let bindingStartColumn = startColumn;
|
|
||||||
for (let i = 1; i < binding.length; i++) {
|
|
||||||
const expRange = binding[i];
|
|
||||||
bindingStartLine = encodeInteger(writer, expRange[1], bindingStartLine);
|
|
||||||
bindingStartColumn = encodeInteger(writer, expRange[2], bindingStartColumn);
|
|
||||||
encodeInteger(writer, expRange[0], 0);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
for (index++; index < ranges.length;) {
|
|
||||||
const next = ranges[index];
|
|
||||||
const { 0: l, 1: c } = next;
|
|
||||||
if (l > endLine || (l === endLine && c >= endColumn)) {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
index = _encodeGeneratedRanges(ranges, index, writer, state);
|
|
||||||
}
|
|
||||||
if (state[0] < endLine) {
|
|
||||||
catchupLine(writer, state[0], endLine);
|
|
||||||
state[0] = endLine;
|
|
||||||
state[1] = 0;
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
writer.write(comma);
|
|
||||||
}
|
|
||||||
state[1] = encodeInteger(writer, endColumn, state[1]);
|
|
||||||
return index;
|
|
||||||
}
|
|
||||||
function catchupLine(writer, lastLine, line) {
|
|
||||||
do {
|
|
||||||
writer.write(semicolon);
|
|
||||||
} while (++lastLine < line);
|
|
||||||
}
|
|
||||||
|
|
||||||
function decode(mappings) {
|
|
||||||
const { length } = mappings;
|
|
||||||
const reader = new StringReader(mappings);
|
|
||||||
const decoded = [];
|
|
||||||
let genColumn = 0;
|
|
||||||
let sourcesIndex = 0;
|
|
||||||
let sourceLine = 0;
|
|
||||||
let sourceColumn = 0;
|
|
||||||
let namesIndex = 0;
|
|
||||||
do {
|
|
||||||
const semi = reader.indexOf(';');
|
|
||||||
const line = [];
|
|
||||||
let sorted = true;
|
|
||||||
let lastCol = 0;
|
|
||||||
genColumn = 0;
|
|
||||||
while (reader.pos < semi) {
|
|
||||||
let seg;
|
|
||||||
genColumn = decodeInteger(reader, genColumn);
|
|
||||||
if (genColumn < lastCol)
|
|
||||||
sorted = false;
|
|
||||||
lastCol = genColumn;
|
|
||||||
if (hasMoreVlq(reader, semi)) {
|
|
||||||
sourcesIndex = decodeInteger(reader, sourcesIndex);
|
|
||||||
sourceLine = decodeInteger(reader, sourceLine);
|
|
||||||
sourceColumn = decodeInteger(reader, sourceColumn);
|
|
||||||
if (hasMoreVlq(reader, semi)) {
|
|
||||||
namesIndex = decodeInteger(reader, namesIndex);
|
|
||||||
seg = [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex];
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
seg = [genColumn, sourcesIndex, sourceLine, sourceColumn];
|
|
||||||
}
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
seg = [genColumn];
|
|
||||||
}
|
|
||||||
line.push(seg);
|
|
||||||
reader.pos++;
|
|
||||||
}
|
|
||||||
if (!sorted)
|
|
||||||
sort(line);
|
|
||||||
decoded.push(line);
|
|
||||||
reader.pos = semi + 1;
|
|
||||||
} while (reader.pos <= length);
|
|
||||||
return decoded;
|
|
||||||
}
|
|
||||||
function sort(line) {
|
|
||||||
line.sort(sortComparator);
|
|
||||||
}
|
|
||||||
function sortComparator(a, b) {
|
|
||||||
return a[0] - b[0];
|
|
||||||
}
|
|
||||||
function encode(decoded) {
|
|
||||||
const writer = new StringWriter();
|
|
||||||
let sourcesIndex = 0;
|
|
||||||
let sourceLine = 0;
|
|
||||||
let sourceColumn = 0;
|
|
||||||
let namesIndex = 0;
|
|
||||||
for (let i = 0; i < decoded.length; i++) {
|
|
||||||
const line = decoded[i];
|
|
||||||
if (i > 0)
|
|
||||||
writer.write(semicolon);
|
|
||||||
if (line.length === 0)
|
|
||||||
continue;
|
|
||||||
let genColumn = 0;
|
|
||||||
for (let j = 0; j < line.length; j++) {
|
|
||||||
const segment = line[j];
|
|
||||||
if (j > 0)
|
|
||||||
writer.write(comma);
|
|
||||||
genColumn = encodeInteger(writer, segment[0], genColumn);
|
|
||||||
if (segment.length === 1)
|
|
||||||
continue;
|
|
||||||
sourcesIndex = encodeInteger(writer, segment[1], sourcesIndex);
|
|
||||||
sourceLine = encodeInteger(writer, segment[2], sourceLine);
|
|
||||||
sourceColumn = encodeInteger(writer, segment[3], sourceColumn);
|
|
||||||
if (segment.length === 4)
|
|
||||||
continue;
|
|
||||||
namesIndex = encodeInteger(writer, segment[4], namesIndex);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return writer.flush();
|
|
||||||
}
|
|
||||||
|
|
||||||
export { decode, decodeGeneratedRanges, decodeOriginalScopes, encode, encodeGeneratedRanges, encodeOriginalScopes };
|
|
||||||
//# sourceMappingURL=sourcemap-codec.mjs.map
|
|
||||||
1
node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.mjs.map
generated
vendored
1
node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.mjs.map
generated
vendored
File diff suppressed because one or more lines are too long
439
node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.umd.js
generated
vendored
439
node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.umd.js
generated
vendored
@ -1,439 +0,0 @@
|
|||||||
(function (global, factory) {
|
|
||||||
typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) :
|
|
||||||
typeof define === 'function' && define.amd ? define(['exports'], factory) :
|
|
||||||
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.sourcemapCodec = {}));
|
|
||||||
})(this, (function (exports) { 'use strict';
|
|
||||||
|
|
||||||
const comma = ','.charCodeAt(0);
|
|
||||||
const semicolon = ';'.charCodeAt(0);
|
|
||||||
const chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/';
|
|
||||||
const intToChar = new Uint8Array(64); // 64 possible chars.
|
|
||||||
const charToInt = new Uint8Array(128); // z is 122 in ASCII
|
|
||||||
for (let i = 0; i < chars.length; i++) {
|
|
||||||
const c = chars.charCodeAt(i);
|
|
||||||
intToChar[i] = c;
|
|
||||||
charToInt[c] = i;
|
|
||||||
}
|
|
||||||
function decodeInteger(reader, relative) {
|
|
||||||
let value = 0;
|
|
||||||
let shift = 0;
|
|
||||||
let integer = 0;
|
|
||||||
do {
|
|
||||||
const c = reader.next();
|
|
||||||
integer = charToInt[c];
|
|
||||||
value |= (integer & 31) << shift;
|
|
||||||
shift += 5;
|
|
||||||
} while (integer & 32);
|
|
||||||
const shouldNegate = value & 1;
|
|
||||||
value >>>= 1;
|
|
||||||
if (shouldNegate) {
|
|
||||||
value = -0x80000000 | -value;
|
|
||||||
}
|
|
||||||
return relative + value;
|
|
||||||
}
|
|
||||||
function encodeInteger(builder, num, relative) {
|
|
||||||
let delta = num - relative;
|
|
||||||
delta = delta < 0 ? (-delta << 1) | 1 : delta << 1;
|
|
||||||
do {
|
|
||||||
let clamped = delta & 0b011111;
|
|
||||||
delta >>>= 5;
|
|
||||||
if (delta > 0)
|
|
||||||
clamped |= 0b100000;
|
|
||||||
builder.write(intToChar[clamped]);
|
|
||||||
} while (delta > 0);
|
|
||||||
return num;
|
|
||||||
}
|
|
||||||
function hasMoreVlq(reader, max) {
|
|
||||||
if (reader.pos >= max)
|
|
||||||
return false;
|
|
||||||
return reader.peek() !== comma;
|
|
||||||
}
|
|
||||||
|
|
||||||
const bufLength = 1024 * 16;
|
|
||||||
// Provide a fallback for older environments.
|
|
||||||
const td = typeof TextDecoder !== 'undefined'
|
|
||||||
? /* #__PURE__ */ new TextDecoder()
|
|
||||||
: typeof Buffer !== 'undefined'
|
|
||||||
? {
|
|
||||||
decode(buf) {
|
|
||||||
const out = Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength);
|
|
||||||
return out.toString();
|
|
||||||
},
|
|
||||||
}
|
|
||||||
: {
|
|
||||||
decode(buf) {
|
|
||||||
let out = '';
|
|
||||||
for (let i = 0; i < buf.length; i++) {
|
|
||||||
out += String.fromCharCode(buf[i]);
|
|
||||||
}
|
|
||||||
return out;
|
|
||||||
},
|
|
||||||
};
|
|
||||||
class StringWriter {
|
|
||||||
constructor() {
|
|
||||||
this.pos = 0;
|
|
||||||
this.out = '';
|
|
||||||
this.buffer = new Uint8Array(bufLength);
|
|
||||||
}
|
|
||||||
write(v) {
|
|
||||||
const { buffer } = this;
|
|
||||||
buffer[this.pos++] = v;
|
|
||||||
if (this.pos === bufLength) {
|
|
||||||
this.out += td.decode(buffer);
|
|
||||||
this.pos = 0;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
flush() {
|
|
||||||
const { buffer, out, pos } = this;
|
|
||||||
return pos > 0 ? out + td.decode(buffer.subarray(0, pos)) : out;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
class StringReader {
|
|
||||||
constructor(buffer) {
|
|
||||||
this.pos = 0;
|
|
||||||
this.buffer = buffer;
|
|
||||||
}
|
|
||||||
next() {
|
|
||||||
return this.buffer.charCodeAt(this.pos++);
|
|
||||||
}
|
|
||||||
peek() {
|
|
||||||
return this.buffer.charCodeAt(this.pos);
|
|
||||||
}
|
|
||||||
indexOf(char) {
|
|
||||||
const { buffer, pos } = this;
|
|
||||||
const idx = buffer.indexOf(char, pos);
|
|
||||||
return idx === -1 ? buffer.length : idx;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const EMPTY = [];
|
|
||||||
function decodeOriginalScopes(input) {
|
|
||||||
const { length } = input;
|
|
||||||
const reader = new StringReader(input);
|
|
||||||
const scopes = [];
|
|
||||||
const stack = [];
|
|
||||||
let line = 0;
|
|
||||||
for (; reader.pos < length; reader.pos++) {
|
|
||||||
line = decodeInteger(reader, line);
|
|
||||||
const column = decodeInteger(reader, 0);
|
|
||||||
if (!hasMoreVlq(reader, length)) {
|
|
||||||
const last = stack.pop();
|
|
||||||
last[2] = line;
|
|
||||||
last[3] = column;
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
const kind = decodeInteger(reader, 0);
|
|
||||||
const fields = decodeInteger(reader, 0);
|
|
||||||
const hasName = fields & 0b0001;
|
|
||||||
const scope = (hasName ? [line, column, 0, 0, kind, decodeInteger(reader, 0)] : [line, column, 0, 0, kind]);
|
|
||||||
let vars = EMPTY;
|
|
||||||
if (hasMoreVlq(reader, length)) {
|
|
||||||
vars = [];
|
|
||||||
do {
|
|
||||||
const varsIndex = decodeInteger(reader, 0);
|
|
||||||
vars.push(varsIndex);
|
|
||||||
} while (hasMoreVlq(reader, length));
|
|
||||||
}
|
|
||||||
scope.vars = vars;
|
|
||||||
scopes.push(scope);
|
|
||||||
stack.push(scope);
|
|
||||||
}
|
|
||||||
return scopes;
|
|
||||||
}
|
|
||||||
function encodeOriginalScopes(scopes) {
|
|
||||||
const writer = new StringWriter();
|
|
||||||
for (let i = 0; i < scopes.length;) {
|
|
||||||
i = _encodeOriginalScopes(scopes, i, writer, [0]);
|
|
||||||
}
|
|
||||||
return writer.flush();
|
|
||||||
}
|
|
||||||
function _encodeOriginalScopes(scopes, index, writer, state) {
|
|
||||||
const scope = scopes[index];
|
|
||||||
const { 0: startLine, 1: startColumn, 2: endLine, 3: endColumn, 4: kind, vars } = scope;
|
|
||||||
if (index > 0)
|
|
||||||
writer.write(comma);
|
|
||||||
state[0] = encodeInteger(writer, startLine, state[0]);
|
|
||||||
encodeInteger(writer, startColumn, 0);
|
|
||||||
encodeInteger(writer, kind, 0);
|
|
||||||
const fields = scope.length === 6 ? 0b0001 : 0;
|
|
||||||
encodeInteger(writer, fields, 0);
|
|
||||||
if (scope.length === 6)
|
|
||||||
encodeInteger(writer, scope[5], 0);
|
|
||||||
for (const v of vars) {
|
|
||||||
encodeInteger(writer, v, 0);
|
|
||||||
}
|
|
||||||
for (index++; index < scopes.length;) {
|
|
||||||
const next = scopes[index];
|
|
||||||
const { 0: l, 1: c } = next;
|
|
||||||
if (l > endLine || (l === endLine && c >= endColumn)) {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
index = _encodeOriginalScopes(scopes, index, writer, state);
|
|
||||||
}
|
|
||||||
writer.write(comma);
|
|
||||||
state[0] = encodeInteger(writer, endLine, state[0]);
|
|
||||||
encodeInteger(writer, endColumn, 0);
|
|
||||||
return index;
|
|
||||||
}
|
|
||||||
function decodeGeneratedRanges(input) {
|
|
||||||
const { length } = input;
|
|
||||||
const reader = new StringReader(input);
|
|
||||||
const ranges = [];
|
|
||||||
const stack = [];
|
|
||||||
let genLine = 0;
|
|
||||||
let definitionSourcesIndex = 0;
|
|
||||||
let definitionScopeIndex = 0;
|
|
||||||
let callsiteSourcesIndex = 0;
|
|
||||||
let callsiteLine = 0;
|
|
||||||
let callsiteColumn = 0;
|
|
||||||
let bindingLine = 0;
|
|
||||||
let bindingColumn = 0;
|
|
||||||
do {
|
|
||||||
const semi = reader.indexOf(';');
|
|
||||||
let genColumn = 0;
|
|
||||||
for (; reader.pos < semi; reader.pos++) {
|
|
||||||
genColumn = decodeInteger(reader, genColumn);
|
|
||||||
if (!hasMoreVlq(reader, semi)) {
|
|
||||||
const last = stack.pop();
|
|
||||||
last[2] = genLine;
|
|
||||||
last[3] = genColumn;
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
const fields = decodeInteger(reader, 0);
|
|
||||||
const hasDefinition = fields & 0b0001;
|
|
||||||
const hasCallsite = fields & 0b0010;
|
|
||||||
const hasScope = fields & 0b0100;
|
|
||||||
let callsite = null;
|
|
||||||
let bindings = EMPTY;
|
|
||||||
let range;
|
|
||||||
if (hasDefinition) {
|
|
||||||
const defSourcesIndex = decodeInteger(reader, definitionSourcesIndex);
|
|
||||||
definitionScopeIndex = decodeInteger(reader, definitionSourcesIndex === defSourcesIndex ? definitionScopeIndex : 0);
|
|
||||||
definitionSourcesIndex = defSourcesIndex;
|
|
||||||
range = [genLine, genColumn, 0, 0, defSourcesIndex, definitionScopeIndex];
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
range = [genLine, genColumn, 0, 0];
|
|
||||||
}
|
|
||||||
range.isScope = !!hasScope;
|
|
||||||
if (hasCallsite) {
|
|
||||||
const prevCsi = callsiteSourcesIndex;
|
|
||||||
const prevLine = callsiteLine;
|
|
||||||
callsiteSourcesIndex = decodeInteger(reader, callsiteSourcesIndex);
|
|
||||||
const sameSource = prevCsi === callsiteSourcesIndex;
|
|
||||||
callsiteLine = decodeInteger(reader, sameSource ? callsiteLine : 0);
|
|
||||||
callsiteColumn = decodeInteger(reader, sameSource && prevLine === callsiteLine ? callsiteColumn : 0);
|
|
||||||
callsite = [callsiteSourcesIndex, callsiteLine, callsiteColumn];
|
|
||||||
}
|
|
||||||
range.callsite = callsite;
|
|
||||||
if (hasMoreVlq(reader, semi)) {
|
|
||||||
bindings = [];
|
|
||||||
do {
|
|
||||||
bindingLine = genLine;
|
|
||||||
bindingColumn = genColumn;
|
|
||||||
const expressionsCount = decodeInteger(reader, 0);
|
|
||||||
let expressionRanges;
|
|
||||||
if (expressionsCount < -1) {
|
|
||||||
expressionRanges = [[decodeInteger(reader, 0)]];
|
|
||||||
for (let i = -1; i > expressionsCount; i--) {
|
|
||||||
const prevBl = bindingLine;
|
|
||||||
bindingLine = decodeInteger(reader, bindingLine);
|
|
||||||
bindingColumn = decodeInteger(reader, bindingLine === prevBl ? bindingColumn : 0);
|
|
||||||
const expression = decodeInteger(reader, 0);
|
|
||||||
expressionRanges.push([expression, bindingLine, bindingColumn]);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
expressionRanges = [[expressionsCount]];
|
|
||||||
}
|
|
||||||
bindings.push(expressionRanges);
|
|
||||||
} while (hasMoreVlq(reader, semi));
|
|
||||||
}
|
|
||||||
range.bindings = bindings;
|
|
||||||
ranges.push(range);
|
|
||||||
stack.push(range);
|
|
||||||
}
|
|
||||||
genLine++;
|
|
||||||
reader.pos = semi + 1;
|
|
||||||
} while (reader.pos < length);
|
|
||||||
return ranges;
|
|
||||||
}
|
|
||||||
function encodeGeneratedRanges(ranges) {
|
|
||||||
if (ranges.length === 0)
|
|
||||||
return '';
|
|
||||||
const writer = new StringWriter();
|
|
||||||
for (let i = 0; i < ranges.length;) {
|
|
||||||
i = _encodeGeneratedRanges(ranges, i, writer, [0, 0, 0, 0, 0, 0, 0]);
|
|
||||||
}
|
|
||||||
return writer.flush();
|
|
||||||
}
|
|
||||||
function _encodeGeneratedRanges(ranges, index, writer, state) {
|
|
||||||
const range = ranges[index];
|
|
||||||
const { 0: startLine, 1: startColumn, 2: endLine, 3: endColumn, isScope, callsite, bindings, } = range;
|
|
||||||
if (state[0] < startLine) {
|
|
||||||
catchupLine(writer, state[0], startLine);
|
|
||||||
state[0] = startLine;
|
|
||||||
state[1] = 0;
|
|
||||||
}
|
|
||||||
else if (index > 0) {
|
|
||||||
writer.write(comma);
|
|
||||||
}
|
|
||||||
state[1] = encodeInteger(writer, range[1], state[1]);
|
|
||||||
const fields = (range.length === 6 ? 0b0001 : 0) | (callsite ? 0b0010 : 0) | (isScope ? 0b0100 : 0);
|
|
||||||
encodeInteger(writer, fields, 0);
|
|
||||||
if (range.length === 6) {
|
|
||||||
const { 4: sourcesIndex, 5: scopesIndex } = range;
|
|
||||||
if (sourcesIndex !== state[2]) {
|
|
||||||
state[3] = 0;
|
|
||||||
}
|
|
||||||
state[2] = encodeInteger(writer, sourcesIndex, state[2]);
|
|
||||||
state[3] = encodeInteger(writer, scopesIndex, state[3]);
|
|
||||||
}
|
|
||||||
if (callsite) {
|
|
||||||
const { 0: sourcesIndex, 1: callLine, 2: callColumn } = range.callsite;
|
|
||||||
if (sourcesIndex !== state[4]) {
|
|
||||||
state[5] = 0;
|
|
||||||
state[6] = 0;
|
|
||||||
}
|
|
||||||
else if (callLine !== state[5]) {
|
|
||||||
state[6] = 0;
|
|
||||||
}
|
|
||||||
state[4] = encodeInteger(writer, sourcesIndex, state[4]);
|
|
||||||
state[5] = encodeInteger(writer, callLine, state[5]);
|
|
||||||
state[6] = encodeInteger(writer, callColumn, state[6]);
|
|
||||||
}
|
|
||||||
if (bindings) {
|
|
||||||
for (const binding of bindings) {
|
|
||||||
if (binding.length > 1)
|
|
||||||
encodeInteger(writer, -binding.length, 0);
|
|
||||||
const expression = binding[0][0];
|
|
||||||
encodeInteger(writer, expression, 0);
|
|
||||||
let bindingStartLine = startLine;
|
|
||||||
let bindingStartColumn = startColumn;
|
|
||||||
for (let i = 1; i < binding.length; i++) {
|
|
||||||
const expRange = binding[i];
|
|
||||||
bindingStartLine = encodeInteger(writer, expRange[1], bindingStartLine);
|
|
||||||
bindingStartColumn = encodeInteger(writer, expRange[2], bindingStartColumn);
|
|
||||||
encodeInteger(writer, expRange[0], 0);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
for (index++; index < ranges.length;) {
|
|
||||||
const next = ranges[index];
|
|
||||||
const { 0: l, 1: c } = next;
|
|
||||||
if (l > endLine || (l === endLine && c >= endColumn)) {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
index = _encodeGeneratedRanges(ranges, index, writer, state);
|
|
||||||
}
|
|
||||||
if (state[0] < endLine) {
|
|
||||||
catchupLine(writer, state[0], endLine);
|
|
||||||
state[0] = endLine;
|
|
||||||
state[1] = 0;
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
writer.write(comma);
|
|
||||||
}
|
|
||||||
state[1] = encodeInteger(writer, endColumn, state[1]);
|
|
||||||
return index;
|
|
||||||
}
|
|
||||||
function catchupLine(writer, lastLine, line) {
|
|
||||||
do {
|
|
||||||
writer.write(semicolon);
|
|
||||||
} while (++lastLine < line);
|
|
||||||
}
|
|
||||||
|
|
||||||
function decode(mappings) {
|
|
||||||
const { length } = mappings;
|
|
||||||
const reader = new StringReader(mappings);
|
|
||||||
const decoded = [];
|
|
||||||
let genColumn = 0;
|
|
||||||
let sourcesIndex = 0;
|
|
||||||
let sourceLine = 0;
|
|
||||||
let sourceColumn = 0;
|
|
||||||
let namesIndex = 0;
|
|
||||||
do {
|
|
||||||
const semi = reader.indexOf(';');
|
|
||||||
const line = [];
|
|
||||||
let sorted = true;
|
|
||||||
let lastCol = 0;
|
|
||||||
genColumn = 0;
|
|
||||||
while (reader.pos < semi) {
|
|
||||||
let seg;
|
|
||||||
genColumn = decodeInteger(reader, genColumn);
|
|
||||||
if (genColumn < lastCol)
|
|
||||||
sorted = false;
|
|
||||||
lastCol = genColumn;
|
|
||||||
if (hasMoreVlq(reader, semi)) {
|
|
||||||
sourcesIndex = decodeInteger(reader, sourcesIndex);
|
|
||||||
sourceLine = decodeInteger(reader, sourceLine);
|
|
||||||
sourceColumn = decodeInteger(reader, sourceColumn);
|
|
||||||
if (hasMoreVlq(reader, semi)) {
|
|
||||||
namesIndex = decodeInteger(reader, namesIndex);
|
|
||||||
seg = [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex];
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
seg = [genColumn, sourcesIndex, sourceLine, sourceColumn];
|
|
||||||
}
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
seg = [genColumn];
|
|
||||||
}
|
|
||||||
line.push(seg);
|
|
||||||
reader.pos++;
|
|
||||||
}
|
|
||||||
if (!sorted)
|
|
||||||
sort(line);
|
|
||||||
decoded.push(line);
|
|
||||||
reader.pos = semi + 1;
|
|
||||||
} while (reader.pos <= length);
|
|
||||||
return decoded;
|
|
||||||
}
|
|
||||||
function sort(line) {
|
|
||||||
line.sort(sortComparator);
|
|
||||||
}
|
|
||||||
function sortComparator(a, b) {
|
|
||||||
return a[0] - b[0];
|
|
||||||
}
|
|
||||||
function encode(decoded) {
|
|
||||||
const writer = new StringWriter();
|
|
||||||
let sourcesIndex = 0;
|
|
||||||
let sourceLine = 0;
|
|
||||||
let sourceColumn = 0;
|
|
||||||
let namesIndex = 0;
|
|
||||||
for (let i = 0; i < decoded.length; i++) {
|
|
||||||
const line = decoded[i];
|
|
||||||
if (i > 0)
|
|
||||||
writer.write(semicolon);
|
|
||||||
if (line.length === 0)
|
|
||||||
continue;
|
|
||||||
let genColumn = 0;
|
|
||||||
for (let j = 0; j < line.length; j++) {
|
|
||||||
const segment = line[j];
|
|
||||||
if (j > 0)
|
|
||||||
writer.write(comma);
|
|
||||||
genColumn = encodeInteger(writer, segment[0], genColumn);
|
|
||||||
if (segment.length === 1)
|
|
||||||
continue;
|
|
||||||
sourcesIndex = encodeInteger(writer, segment[1], sourcesIndex);
|
|
||||||
sourceLine = encodeInteger(writer, segment[2], sourceLine);
|
|
||||||
sourceColumn = encodeInteger(writer, segment[3], sourceColumn);
|
|
||||||
if (segment.length === 4)
|
|
||||||
continue;
|
|
||||||
namesIndex = encodeInteger(writer, segment[4], namesIndex);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return writer.flush();
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.decode = decode;
|
|
||||||
exports.decodeGeneratedRanges = decodeGeneratedRanges;
|
|
||||||
exports.decodeOriginalScopes = decodeOriginalScopes;
|
|
||||||
exports.encode = encode;
|
|
||||||
exports.encodeGeneratedRanges = encodeGeneratedRanges;
|
|
||||||
exports.encodeOriginalScopes = encodeOriginalScopes;
|
|
||||||
|
|
||||||
Object.defineProperty(exports, '__esModule', { value: true });
|
|
||||||
|
|
||||||
}));
|
|
||||||
//# sourceMappingURL=sourcemap-codec.umd.js.map
|
|
||||||
1
node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.umd.js.map
generated
vendored
1
node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.umd.js.map
generated
vendored
File diff suppressed because one or more lines are too long
49
node_modules/@jridgewell/sourcemap-codec/dist/types/scopes.d.ts
generated
vendored
49
node_modules/@jridgewell/sourcemap-codec/dist/types/scopes.d.ts
generated
vendored
@ -1,49 +0,0 @@
|
|||||||
declare type Line = number;
|
|
||||||
declare type Column = number;
|
|
||||||
declare type Kind = number;
|
|
||||||
declare type Name = number;
|
|
||||||
declare type Var = number;
|
|
||||||
declare type SourcesIndex = number;
|
|
||||||
declare type ScopesIndex = number;
|
|
||||||
declare type Mix<A, B, O> = (A & O) | (B & O);
|
|
||||||
export declare type OriginalScope = Mix<[
|
|
||||||
Line,
|
|
||||||
Column,
|
|
||||||
Line,
|
|
||||||
Column,
|
|
||||||
Kind
|
|
||||||
], [
|
|
||||||
Line,
|
|
||||||
Column,
|
|
||||||
Line,
|
|
||||||
Column,
|
|
||||||
Kind,
|
|
||||||
Name
|
|
||||||
], {
|
|
||||||
vars: Var[];
|
|
||||||
}>;
|
|
||||||
export declare type GeneratedRange = Mix<[
|
|
||||||
Line,
|
|
||||||
Column,
|
|
||||||
Line,
|
|
||||||
Column
|
|
||||||
], [
|
|
||||||
Line,
|
|
||||||
Column,
|
|
||||||
Line,
|
|
||||||
Column,
|
|
||||||
SourcesIndex,
|
|
||||||
ScopesIndex
|
|
||||||
], {
|
|
||||||
callsite: CallSite | null;
|
|
||||||
bindings: Binding[];
|
|
||||||
isScope: boolean;
|
|
||||||
}>;
|
|
||||||
export declare type CallSite = [SourcesIndex, Line, Column];
|
|
||||||
declare type Binding = BindingExpressionRange[];
|
|
||||||
export declare type BindingExpressionRange = [Name] | [Name, Line, Column];
|
|
||||||
export declare function decodeOriginalScopes(input: string): OriginalScope[];
|
|
||||||
export declare function encodeOriginalScopes(scopes: OriginalScope[]): string;
|
|
||||||
export declare function decodeGeneratedRanges(input: string): GeneratedRange[];
|
|
||||||
export declare function encodeGeneratedRanges(ranges: GeneratedRange[]): string;
|
|
||||||
export {};
|
|
||||||
8
node_modules/@jridgewell/sourcemap-codec/dist/types/sourcemap-codec.d.ts
generated
vendored
8
node_modules/@jridgewell/sourcemap-codec/dist/types/sourcemap-codec.d.ts
generated
vendored
@ -1,8 +0,0 @@
|
|||||||
export { decodeOriginalScopes, encodeOriginalScopes, decodeGeneratedRanges, encodeGeneratedRanges, } from './scopes';
|
|
||||||
export type { OriginalScope, GeneratedRange, CallSite, BindingExpressionRange } from './scopes';
|
|
||||||
export declare type SourceMapSegment = [number] | [number, number, number, number] | [number, number, number, number, number];
|
|
||||||
export declare type SourceMapLine = SourceMapSegment[];
|
|
||||||
export declare type SourceMapMappings = SourceMapLine[];
|
|
||||||
export declare function decode(mappings: string): SourceMapMappings;
|
|
||||||
export declare function encode(decoded: SourceMapMappings): string;
|
|
||||||
export declare function encode(decoded: Readonly<SourceMapMappings>): string;
|
|
||||||
15
node_modules/@jridgewell/sourcemap-codec/dist/types/strings.d.ts
generated
vendored
15
node_modules/@jridgewell/sourcemap-codec/dist/types/strings.d.ts
generated
vendored
@ -1,15 +0,0 @@
|
|||||||
export declare class StringWriter {
|
|
||||||
pos: number;
|
|
||||||
private out;
|
|
||||||
private buffer;
|
|
||||||
write(v: number): void;
|
|
||||||
flush(): string;
|
|
||||||
}
|
|
||||||
export declare class StringReader {
|
|
||||||
pos: number;
|
|
||||||
private buffer;
|
|
||||||
constructor(buffer: string);
|
|
||||||
next(): number;
|
|
||||||
peek(): number;
|
|
||||||
indexOf(char: string): number;
|
|
||||||
}
|
|
||||||
6
node_modules/@jridgewell/sourcemap-codec/dist/types/vlq.d.ts
generated
vendored
6
node_modules/@jridgewell/sourcemap-codec/dist/types/vlq.d.ts
generated
vendored
@ -1,6 +0,0 @@
|
|||||||
import type { StringReader, StringWriter } from './strings';
|
|
||||||
export declare const comma: number;
|
|
||||||
export declare const semicolon: number;
|
|
||||||
export declare function decodeInteger(reader: StringReader, relative: number): number;
|
|
||||||
export declare function encodeInteger(builder: StringWriter, num: number, relative: number): number;
|
|
||||||
export declare function hasMoreVlq(reader: StringReader, max: number): boolean;
|
|
||||||
75
node_modules/@jridgewell/sourcemap-codec/package.json
generated
vendored
75
node_modules/@jridgewell/sourcemap-codec/package.json
generated
vendored
@ -1,75 +0,0 @@
|
|||||||
{
|
|
||||||
"name": "@jridgewell/sourcemap-codec",
|
|
||||||
"version": "1.5.0",
|
|
||||||
"description": "Encode/decode sourcemap mappings",
|
|
||||||
"keywords": [
|
|
||||||
"sourcemap",
|
|
||||||
"vlq"
|
|
||||||
],
|
|
||||||
"main": "dist/sourcemap-codec.umd.js",
|
|
||||||
"module": "dist/sourcemap-codec.mjs",
|
|
||||||
"types": "dist/types/sourcemap-codec.d.ts",
|
|
||||||
"files": [
|
|
||||||
"dist"
|
|
||||||
],
|
|
||||||
"exports": {
|
|
||||||
".": [
|
|
||||||
{
|
|
||||||
"types": "./dist/types/sourcemap-codec.d.ts",
|
|
||||||
"browser": "./dist/sourcemap-codec.umd.js",
|
|
||||||
"require": "./dist/sourcemap-codec.umd.js",
|
|
||||||
"import": "./dist/sourcemap-codec.mjs"
|
|
||||||
},
|
|
||||||
"./dist/sourcemap-codec.umd.js"
|
|
||||||
],
|
|
||||||
"./package.json": "./package.json"
|
|
||||||
},
|
|
||||||
"scripts": {
|
|
||||||
"benchmark": "run-s build:rollup benchmark:*",
|
|
||||||
"benchmark:install": "cd benchmark && npm install",
|
|
||||||
"benchmark:only": "node --expose-gc benchmark/index.js",
|
|
||||||
"build": "run-s -n build:*",
|
|
||||||
"build:rollup": "rollup -c rollup.config.js",
|
|
||||||
"build:ts": "tsc --project tsconfig.build.json",
|
|
||||||
"lint": "run-s -n lint:*",
|
|
||||||
"lint:prettier": "npm run test:lint:prettier -- --write",
|
|
||||||
"lint:ts": "npm run test:lint:ts -- --fix",
|
|
||||||
"prebuild": "rm -rf dist",
|
|
||||||
"prepublishOnly": "npm run preversion",
|
|
||||||
"preversion": "run-s test build",
|
|
||||||
"test": "run-s -n test:lint test:only",
|
|
||||||
"test:debug": "mocha --inspect-brk",
|
|
||||||
"test:lint": "run-s -n test:lint:*",
|
|
||||||
"test:lint:prettier": "prettier --check '{src,test}/**/*.ts'",
|
|
||||||
"test:lint:ts": "eslint '{src,test}/**/*.ts'",
|
|
||||||
"test:only": "mocha",
|
|
||||||
"test:coverage": "c8 mocha",
|
|
||||||
"test:watch": "mocha --watch"
|
|
||||||
},
|
|
||||||
"repository": {
|
|
||||||
"type": "git",
|
|
||||||
"url": "git+https://github.com/jridgewell/sourcemap-codec.git"
|
|
||||||
},
|
|
||||||
"author": "Rich Harris",
|
|
||||||
"license": "MIT",
|
|
||||||
"devDependencies": {
|
|
||||||
"@rollup/plugin-typescript": "8.3.0",
|
|
||||||
"@types/mocha": "10.0.6",
|
|
||||||
"@types/node": "17.0.15",
|
|
||||||
"@typescript-eslint/eslint-plugin": "5.10.0",
|
|
||||||
"@typescript-eslint/parser": "5.10.0",
|
|
||||||
"benchmark": "2.1.4",
|
|
||||||
"c8": "7.11.2",
|
|
||||||
"eslint": "8.7.0",
|
|
||||||
"eslint-config-prettier": "8.3.0",
|
|
||||||
"mocha": "9.2.0",
|
|
||||||
"npm-run-all": "4.1.5",
|
|
||||||
"prettier": "2.5.1",
|
|
||||||
"rollup": "2.64.0",
|
|
||||||
"source-map": "0.6.1",
|
|
||||||
"source-map-js": "1.0.2",
|
|
||||||
"sourcemap-codec": "1.4.8",
|
|
||||||
"tsx": "4.7.1",
|
|
||||||
"typescript": "4.5.4"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
19
node_modules/@jridgewell/trace-mapping/LICENSE
generated
vendored
19
node_modules/@jridgewell/trace-mapping/LICENSE
generated
vendored
@ -1,19 +0,0 @@
|
|||||||
Copyright 2022 Justin Ridgewell <justin@ridgewell.name>
|
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
||||||
of this software and associated documentation files (the "Software"), to deal
|
|
||||||
in the Software without restriction, including without limitation the rights
|
|
||||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
||||||
copies of the Software, and to permit persons to whom the Software is
|
|
||||||
furnished to do so, subject to the following conditions:
|
|
||||||
|
|
||||||
The above copyright notice and this permission notice shall be included in
|
|
||||||
all copies or substantial portions of the Software.
|
|
||||||
|
|
||||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
||||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
||||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
||||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
||||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
||||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
||||||
SOFTWARE.
|
|
||||||
257
node_modules/@jridgewell/trace-mapping/README.md
generated
vendored
257
node_modules/@jridgewell/trace-mapping/README.md
generated
vendored
@ -1,257 +0,0 @@
|
|||||||
# @jridgewell/trace-mapping
|
|
||||||
|
|
||||||
> Trace the original position through a source map
|
|
||||||
|
|
||||||
`trace-mapping` allows you to take the line and column of an output file and trace it to the
|
|
||||||
original location in the source file through a source map.
|
|
||||||
|
|
||||||
You may already be familiar with the [`source-map`][source-map] package's `SourceMapConsumer`. This
|
|
||||||
provides the same `originalPositionFor` and `generatedPositionFor` API, without requiring WASM.
|
|
||||||
|
|
||||||
## Installation
|
|
||||||
|
|
||||||
```sh
|
|
||||||
npm install @jridgewell/trace-mapping
|
|
||||||
```
|
|
||||||
|
|
||||||
## Usage
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
import {
|
|
||||||
TraceMap,
|
|
||||||
originalPositionFor,
|
|
||||||
generatedPositionFor,
|
|
||||||
sourceContentFor,
|
|
||||||
isIgnored,
|
|
||||||
} from '@jridgewell/trace-mapping';
|
|
||||||
|
|
||||||
const tracer = new TraceMap({
|
|
||||||
version: 3,
|
|
||||||
sources: ['input.js'],
|
|
||||||
sourcesContent: ['content of input.js'],
|
|
||||||
names: ['foo'],
|
|
||||||
mappings: 'KAyCIA',
|
|
||||||
ignoreList: [],
|
|
||||||
});
|
|
||||||
|
|
||||||
// Lines start at line 1, columns at column 0.
|
|
||||||
const traced = originalPositionFor(tracer, { line: 1, column: 5 });
|
|
||||||
assert.deepEqual(traced, {
|
|
||||||
source: 'input.js',
|
|
||||||
line: 42,
|
|
||||||
column: 4,
|
|
||||||
name: 'foo',
|
|
||||||
});
|
|
||||||
|
|
||||||
const content = sourceContentFor(tracer, traced.source);
|
|
||||||
assert.strictEqual(content, 'content for input.js');
|
|
||||||
|
|
||||||
const generated = generatedPositionFor(tracer, {
|
|
||||||
source: 'input.js',
|
|
||||||
line: 42,
|
|
||||||
column: 4,
|
|
||||||
});
|
|
||||||
assert.deepEqual(generated, {
|
|
||||||
line: 1,
|
|
||||||
column: 5,
|
|
||||||
});
|
|
||||||
|
|
||||||
const ignored = isIgnored(tracer, 'input.js');
|
|
||||||
assert.equal(ignored, false);
|
|
||||||
```
|
|
||||||
|
|
||||||
We also provide a lower level API to get the actual segment that matches our line and column. Unlike
|
|
||||||
`originalPositionFor`, `traceSegment` uses a 0-base for `line`:
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
import { traceSegment } from '@jridgewell/trace-mapping';
|
|
||||||
|
|
||||||
// line is 0-base.
|
|
||||||
const traced = traceSegment(tracer, /* line */ 0, /* column */ 5);
|
|
||||||
|
|
||||||
// Segments are [outputColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex]
|
|
||||||
// Again, line is 0-base and so is sourceLine
|
|
||||||
assert.deepEqual(traced, [5, 0, 41, 4, 0]);
|
|
||||||
```
|
|
||||||
|
|
||||||
### SectionedSourceMaps
|
|
||||||
|
|
||||||
The sourcemap spec defines a special `sections` field that's designed to handle concatenation of
|
|
||||||
output code with associated sourcemaps. This type of sourcemap is rarely used (no major build tool
|
|
||||||
produces it), but if you are hand coding a concatenation you may need it. We provide an `AnyMap`
|
|
||||||
helper that can receive either a regular sourcemap or a `SectionedSourceMap` and returns a
|
|
||||||
`TraceMap` instance:
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
import { AnyMap } from '@jridgewell/trace-mapping';
|
|
||||||
const fooOutput = 'foo';
|
|
||||||
const barOutput = 'bar';
|
|
||||||
const output = [fooOutput, barOutput].join('\n');
|
|
||||||
|
|
||||||
const sectioned = new AnyMap({
|
|
||||||
version: 3,
|
|
||||||
sections: [
|
|
||||||
{
|
|
||||||
// 0-base line and column
|
|
||||||
offset: { line: 0, column: 0 },
|
|
||||||
// fooOutput's sourcemap
|
|
||||||
map: {
|
|
||||||
version: 3,
|
|
||||||
sources: ['foo.js'],
|
|
||||||
names: ['foo'],
|
|
||||||
mappings: 'AAAAA',
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
// barOutput's sourcemap will not affect the first line, only the second
|
|
||||||
offset: { line: 1, column: 0 },
|
|
||||||
map: {
|
|
||||||
version: 3,
|
|
||||||
sources: ['bar.js'],
|
|
||||||
names: ['bar'],
|
|
||||||
mappings: 'AAAAA',
|
|
||||||
},
|
|
||||||
},
|
|
||||||
],
|
|
||||||
});
|
|
||||||
|
|
||||||
const traced = originalPositionFor(sectioned, {
|
|
||||||
line: 2,
|
|
||||||
column: 0,
|
|
||||||
});
|
|
||||||
|
|
||||||
assert.deepEqual(traced, {
|
|
||||||
source: 'bar.js',
|
|
||||||
line: 1,
|
|
||||||
column: 0,
|
|
||||||
name: 'bar',
|
|
||||||
});
|
|
||||||
```
|
|
||||||
|
|
||||||
## Benchmarks
|
|
||||||
|
|
||||||
```
|
|
||||||
node v18.0.0
|
|
||||||
|
|
||||||
amp.js.map - 45120 segments
|
|
||||||
|
|
||||||
Memory Usage:
|
|
||||||
trace-mapping decoded 562400 bytes
|
|
||||||
trace-mapping encoded 5706544 bytes
|
|
||||||
source-map-js 10717664 bytes
|
|
||||||
source-map-0.6.1 17446384 bytes
|
|
||||||
source-map-0.8.0 9701757 bytes
|
|
||||||
Smallest memory usage is trace-mapping decoded
|
|
||||||
|
|
||||||
Init speed:
|
|
||||||
trace-mapping: decoded JSON input x 180 ops/sec ±0.34% (85 runs sampled)
|
|
||||||
trace-mapping: encoded JSON input x 364 ops/sec ±1.77% (89 runs sampled)
|
|
||||||
trace-mapping: decoded Object input x 3,116 ops/sec ±0.50% (96 runs sampled)
|
|
||||||
trace-mapping: encoded Object input x 410 ops/sec ±2.62% (85 runs sampled)
|
|
||||||
source-map-js: encoded Object input x 84.23 ops/sec ±0.91% (73 runs sampled)
|
|
||||||
source-map-0.6.1: encoded Object input x 37.21 ops/sec ±2.08% (51 runs sampled)
|
|
||||||
Fastest is trace-mapping: decoded Object input
|
|
||||||
|
|
||||||
Trace speed:
|
|
||||||
trace-mapping: decoded originalPositionFor x 3,952,212 ops/sec ±0.17% (98 runs sampled)
|
|
||||||
trace-mapping: encoded originalPositionFor x 3,487,468 ops/sec ±1.58% (90 runs sampled)
|
|
||||||
source-map-js: encoded originalPositionFor x 827,730 ops/sec ±0.78% (97 runs sampled)
|
|
||||||
source-map-0.6.1: encoded originalPositionFor x 748,991 ops/sec ±0.53% (94 runs sampled)
|
|
||||||
source-map-0.8.0: encoded originalPositionFor x 2,532,894 ops/sec ±0.57% (95 runs sampled)
|
|
||||||
Fastest is trace-mapping: decoded originalPositionFor
|
|
||||||
|
|
||||||
|
|
||||||
***
|
|
||||||
|
|
||||||
|
|
||||||
babel.min.js.map - 347793 segments
|
|
||||||
|
|
||||||
Memory Usage:
|
|
||||||
trace-mapping decoded 89832 bytes
|
|
||||||
trace-mapping encoded 35474640 bytes
|
|
||||||
source-map-js 51257176 bytes
|
|
||||||
source-map-0.6.1 63515664 bytes
|
|
||||||
source-map-0.8.0 42933752 bytes
|
|
||||||
Smallest memory usage is trace-mapping decoded
|
|
||||||
|
|
||||||
Init speed:
|
|
||||||
trace-mapping: decoded JSON input x 15.41 ops/sec ±8.65% (34 runs sampled)
|
|
||||||
trace-mapping: encoded JSON input x 28.20 ops/sec ±12.87% (42 runs sampled)
|
|
||||||
trace-mapping: decoded Object input x 964 ops/sec ±0.36% (99 runs sampled)
|
|
||||||
trace-mapping: encoded Object input x 31.77 ops/sec ±13.79% (45 runs sampled)
|
|
||||||
source-map-js: encoded Object input x 6.45 ops/sec ±5.16% (21 runs sampled)
|
|
||||||
source-map-0.6.1: encoded Object input x 4.07 ops/sec ±5.24% (15 runs sampled)
|
|
||||||
Fastest is trace-mapping: decoded Object input
|
|
||||||
|
|
||||||
Trace speed:
|
|
||||||
trace-mapping: decoded originalPositionFor x 7,183,038 ops/sec ±0.58% (95 runs sampled)
|
|
||||||
trace-mapping: encoded originalPositionFor x 5,192,185 ops/sec ±0.41% (100 runs sampled)
|
|
||||||
source-map-js: encoded originalPositionFor x 4,259,489 ops/sec ±0.79% (94 runs sampled)
|
|
||||||
source-map-0.6.1: encoded originalPositionFor x 3,742,629 ops/sec ±0.71% (95 runs sampled)
|
|
||||||
source-map-0.8.0: encoded originalPositionFor x 6,270,211 ops/sec ±0.64% (94 runs sampled)
|
|
||||||
Fastest is trace-mapping: decoded originalPositionFor
|
|
||||||
|
|
||||||
|
|
||||||
***
|
|
||||||
|
|
||||||
|
|
||||||
preact.js.map - 1992 segments
|
|
||||||
|
|
||||||
Memory Usage:
|
|
||||||
trace-mapping decoded 37128 bytes
|
|
||||||
trace-mapping encoded 247280 bytes
|
|
||||||
source-map-js 1143536 bytes
|
|
||||||
source-map-0.6.1 1290992 bytes
|
|
||||||
source-map-0.8.0 96544 bytes
|
|
||||||
Smallest memory usage is trace-mapping decoded
|
|
||||||
|
|
||||||
Init speed:
|
|
||||||
trace-mapping: decoded JSON input x 3,483 ops/sec ±0.30% (98 runs sampled)
|
|
||||||
trace-mapping: encoded JSON input x 6,092 ops/sec ±0.18% (97 runs sampled)
|
|
||||||
trace-mapping: decoded Object input x 249,076 ops/sec ±0.24% (98 runs sampled)
|
|
||||||
trace-mapping: encoded Object input x 14,555 ops/sec ±0.48% (100 runs sampled)
|
|
||||||
source-map-js: encoded Object input x 2,447 ops/sec ±0.36% (99 runs sampled)
|
|
||||||
source-map-0.6.1: encoded Object input x 1,201 ops/sec ±0.57% (96 runs sampled)
|
|
||||||
Fastest is trace-mapping: decoded Object input
|
|
||||||
|
|
||||||
Trace speed:
|
|
||||||
trace-mapping: decoded originalPositionFor x 7,620,192 ops/sec ±0.09% (99 runs sampled)
|
|
||||||
trace-mapping: encoded originalPositionFor x 6,872,554 ops/sec ±0.30% (97 runs sampled)
|
|
||||||
source-map-js: encoded originalPositionFor x 2,489,570 ops/sec ±0.35% (94 runs sampled)
|
|
||||||
source-map-0.6.1: encoded originalPositionFor x 1,698,633 ops/sec ±0.28% (98 runs sampled)
|
|
||||||
source-map-0.8.0: encoded originalPositionFor x 4,015,644 ops/sec ±0.22% (98 runs sampled)
|
|
||||||
Fastest is trace-mapping: decoded originalPositionFor
|
|
||||||
|
|
||||||
|
|
||||||
***
|
|
||||||
|
|
||||||
|
|
||||||
react.js.map - 5726 segments
|
|
||||||
|
|
||||||
Memory Usage:
|
|
||||||
trace-mapping decoded 16176 bytes
|
|
||||||
trace-mapping encoded 681552 bytes
|
|
||||||
source-map-js 2418352 bytes
|
|
||||||
source-map-0.6.1 2443672 bytes
|
|
||||||
source-map-0.8.0 111768 bytes
|
|
||||||
Smallest memory usage is trace-mapping decoded
|
|
||||||
|
|
||||||
Init speed:
|
|
||||||
trace-mapping: decoded JSON input x 1,720 ops/sec ±0.34% (98 runs sampled)
|
|
||||||
trace-mapping: encoded JSON input x 4,406 ops/sec ±0.35% (100 runs sampled)
|
|
||||||
trace-mapping: decoded Object input x 92,122 ops/sec ±0.10% (99 runs sampled)
|
|
||||||
trace-mapping: encoded Object input x 5,385 ops/sec ±0.37% (99 runs sampled)
|
|
||||||
source-map-js: encoded Object input x 794 ops/sec ±0.40% (98 runs sampled)
|
|
||||||
source-map-0.6.1: encoded Object input x 416 ops/sec ±0.54% (91 runs sampled)
|
|
||||||
Fastest is trace-mapping: decoded Object input
|
|
||||||
|
|
||||||
Trace speed:
|
|
||||||
trace-mapping: decoded originalPositionFor x 32,759,519 ops/sec ±0.33% (100 runs sampled)
|
|
||||||
trace-mapping: encoded originalPositionFor x 31,116,306 ops/sec ±0.33% (97 runs sampled)
|
|
||||||
source-map-js: encoded originalPositionFor x 17,458,435 ops/sec ±0.44% (97 runs sampled)
|
|
||||||
source-map-0.6.1: encoded originalPositionFor x 12,687,097 ops/sec ±0.43% (95 runs sampled)
|
|
||||||
source-map-0.8.0: encoded originalPositionFor x 23,538,275 ops/sec ±0.38% (95 runs sampled)
|
|
||||||
Fastest is trace-mapping: decoded originalPositionFor
|
|
||||||
```
|
|
||||||
|
|
||||||
[source-map]: https://www.npmjs.com/package/source-map
|
|
||||||
580
node_modules/@jridgewell/trace-mapping/dist/trace-mapping.mjs
generated
vendored
580
node_modules/@jridgewell/trace-mapping/dist/trace-mapping.mjs
generated
vendored
@ -1,580 +0,0 @@
|
|||||||
import { encode, decode } from '@jridgewell/sourcemap-codec';
|
|
||||||
import resolveUri from '@jridgewell/resolve-uri';
|
|
||||||
|
|
||||||
function resolve(input, base) {
|
|
||||||
// The base is always treated as a directory, if it's not empty.
|
|
||||||
// https://github.com/mozilla/source-map/blob/8cb3ee57/lib/util.js#L327
|
|
||||||
// https://github.com/chromium/chromium/blob/da4adbb3/third_party/blink/renderer/devtools/front_end/sdk/SourceMap.js#L400-L401
|
|
||||||
if (base && !base.endsWith('/'))
|
|
||||||
base += '/';
|
|
||||||
return resolveUri(input, base);
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Removes everything after the last "/", but leaves the slash.
|
|
||||||
*/
|
|
||||||
function stripFilename(path) {
|
|
||||||
if (!path)
|
|
||||||
return '';
|
|
||||||
const index = path.lastIndexOf('/');
|
|
||||||
return path.slice(0, index + 1);
|
|
||||||
}
|
|
||||||
|
|
||||||
const COLUMN = 0;
|
|
||||||
const SOURCES_INDEX = 1;
|
|
||||||
const SOURCE_LINE = 2;
|
|
||||||
const SOURCE_COLUMN = 3;
|
|
||||||
const NAMES_INDEX = 4;
|
|
||||||
const REV_GENERATED_LINE = 1;
|
|
||||||
const REV_GENERATED_COLUMN = 2;
|
|
||||||
|
|
||||||
function maybeSort(mappings, owned) {
|
|
||||||
const unsortedIndex = nextUnsortedSegmentLine(mappings, 0);
|
|
||||||
if (unsortedIndex === mappings.length)
|
|
||||||
return mappings;
|
|
||||||
// If we own the array (meaning we parsed it from JSON), then we're free to directly mutate it. If
|
|
||||||
// not, we do not want to modify the consumer's input array.
|
|
||||||
if (!owned)
|
|
||||||
mappings = mappings.slice();
|
|
||||||
for (let i = unsortedIndex; i < mappings.length; i = nextUnsortedSegmentLine(mappings, i + 1)) {
|
|
||||||
mappings[i] = sortSegments(mappings[i], owned);
|
|
||||||
}
|
|
||||||
return mappings;
|
|
||||||
}
|
|
||||||
function nextUnsortedSegmentLine(mappings, start) {
|
|
||||||
for (let i = start; i < mappings.length; i++) {
|
|
||||||
if (!isSorted(mappings[i]))
|
|
||||||
return i;
|
|
||||||
}
|
|
||||||
return mappings.length;
|
|
||||||
}
|
|
||||||
function isSorted(line) {
|
|
||||||
for (let j = 1; j < line.length; j++) {
|
|
||||||
if (line[j][COLUMN] < line[j - 1][COLUMN]) {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
function sortSegments(line, owned) {
|
|
||||||
if (!owned)
|
|
||||||
line = line.slice();
|
|
||||||
return line.sort(sortComparator);
|
|
||||||
}
|
|
||||||
function sortComparator(a, b) {
|
|
||||||
return a[COLUMN] - b[COLUMN];
|
|
||||||
}
|
|
||||||
|
|
||||||
let found = false;
|
|
||||||
/**
|
|
||||||
* A binary search implementation that returns the index if a match is found.
|
|
||||||
* If no match is found, then the left-index (the index associated with the item that comes just
|
|
||||||
* before the desired index) is returned. To maintain proper sort order, a splice would happen at
|
|
||||||
* the next index:
|
|
||||||
*
|
|
||||||
* ```js
|
|
||||||
* const array = [1, 3];
|
|
||||||
* const needle = 2;
|
|
||||||
* const index = binarySearch(array, needle, (item, needle) => item - needle);
|
|
||||||
*
|
|
||||||
* assert.equal(index, 0);
|
|
||||||
* array.splice(index + 1, 0, needle);
|
|
||||||
* assert.deepEqual(array, [1, 2, 3]);
|
|
||||||
* ```
|
|
||||||
*/
|
|
||||||
function binarySearch(haystack, needle, low, high) {
|
|
||||||
while (low <= high) {
|
|
||||||
const mid = low + ((high - low) >> 1);
|
|
||||||
const cmp = haystack[mid][COLUMN] - needle;
|
|
||||||
if (cmp === 0) {
|
|
||||||
found = true;
|
|
||||||
return mid;
|
|
||||||
}
|
|
||||||
if (cmp < 0) {
|
|
||||||
low = mid + 1;
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
high = mid - 1;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
found = false;
|
|
||||||
return low - 1;
|
|
||||||
}
|
|
||||||
function upperBound(haystack, needle, index) {
|
|
||||||
for (let i = index + 1; i < haystack.length; index = i++) {
|
|
||||||
if (haystack[i][COLUMN] !== needle)
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
return index;
|
|
||||||
}
|
|
||||||
function lowerBound(haystack, needle, index) {
|
|
||||||
for (let i = index - 1; i >= 0; index = i--) {
|
|
||||||
if (haystack[i][COLUMN] !== needle)
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
return index;
|
|
||||||
}
|
|
||||||
function memoizedState() {
|
|
||||||
return {
|
|
||||||
lastKey: -1,
|
|
||||||
lastNeedle: -1,
|
|
||||||
lastIndex: -1,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* This overly complicated beast is just to record the last tested line/column and the resulting
|
|
||||||
* index, allowing us to skip a few tests if mappings are monotonically increasing.
|
|
||||||
*/
|
|
||||||
function memoizedBinarySearch(haystack, needle, state, key) {
|
|
||||||
const { lastKey, lastNeedle, lastIndex } = state;
|
|
||||||
let low = 0;
|
|
||||||
let high = haystack.length - 1;
|
|
||||||
if (key === lastKey) {
|
|
||||||
if (needle === lastNeedle) {
|
|
||||||
found = lastIndex !== -1 && haystack[lastIndex][COLUMN] === needle;
|
|
||||||
return lastIndex;
|
|
||||||
}
|
|
||||||
if (needle >= lastNeedle) {
|
|
||||||
// lastIndex may be -1 if the previous needle was not found.
|
|
||||||
low = lastIndex === -1 ? 0 : lastIndex;
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
high = lastIndex;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
state.lastKey = key;
|
|
||||||
state.lastNeedle = needle;
|
|
||||||
return (state.lastIndex = binarySearch(haystack, needle, low, high));
|
|
||||||
}
|
|
||||||
|
|
||||||
// Rebuilds the original source files, with mappings that are ordered by source line/column instead
|
|
||||||
// of generated line/column.
|
|
||||||
function buildBySources(decoded, memos) {
|
|
||||||
const sources = memos.map(buildNullArray);
|
|
||||||
for (let i = 0; i < decoded.length; i++) {
|
|
||||||
const line = decoded[i];
|
|
||||||
for (let j = 0; j < line.length; j++) {
|
|
||||||
const seg = line[j];
|
|
||||||
if (seg.length === 1)
|
|
||||||
continue;
|
|
||||||
const sourceIndex = seg[SOURCES_INDEX];
|
|
||||||
const sourceLine = seg[SOURCE_LINE];
|
|
||||||
const sourceColumn = seg[SOURCE_COLUMN];
|
|
||||||
const originalSource = sources[sourceIndex];
|
|
||||||
const originalLine = (originalSource[sourceLine] || (originalSource[sourceLine] = []));
|
|
||||||
const memo = memos[sourceIndex];
|
|
||||||
// The binary search either found a match, or it found the left-index just before where the
|
|
||||||
// segment should go. Either way, we want to insert after that. And there may be multiple
|
|
||||||
// generated segments associated with an original location, so there may need to move several
|
|
||||||
// indexes before we find where we need to insert.
|
|
||||||
let index = upperBound(originalLine, sourceColumn, memoizedBinarySearch(originalLine, sourceColumn, memo, sourceLine));
|
|
||||||
memo.lastIndex = ++index;
|
|
||||||
insert(originalLine, index, [sourceColumn, i, seg[COLUMN]]);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return sources;
|
|
||||||
}
|
|
||||||
function insert(array, index, value) {
|
|
||||||
for (let i = array.length; i > index; i--) {
|
|
||||||
array[i] = array[i - 1];
|
|
||||||
}
|
|
||||||
array[index] = value;
|
|
||||||
}
|
|
||||||
// Null arrays allow us to use ordered index keys without actually allocating contiguous memory like
|
|
||||||
// a real array. We use a null-prototype object to avoid prototype pollution and deoptimizations.
|
|
||||||
// Numeric properties on objects are magically sorted in ascending order by the engine regardless of
|
|
||||||
// the insertion order. So, by setting any numeric keys, even out of order, we'll get ascending
|
|
||||||
// order when iterating with for-in.
|
|
||||||
function buildNullArray() {
|
|
||||||
return { __proto__: null };
|
|
||||||
}
|
|
||||||
|
|
||||||
const AnyMap = function (map, mapUrl) {
|
|
||||||
const parsed = parse(map);
|
|
||||||
if (!('sections' in parsed)) {
|
|
||||||
return new TraceMap(parsed, mapUrl);
|
|
||||||
}
|
|
||||||
const mappings = [];
|
|
||||||
const sources = [];
|
|
||||||
const sourcesContent = [];
|
|
||||||
const names = [];
|
|
||||||
const ignoreList = [];
|
|
||||||
recurse(parsed, mapUrl, mappings, sources, sourcesContent, names, ignoreList, 0, 0, Infinity, Infinity);
|
|
||||||
const joined = {
|
|
||||||
version: 3,
|
|
||||||
file: parsed.file,
|
|
||||||
names,
|
|
||||||
sources,
|
|
||||||
sourcesContent,
|
|
||||||
mappings,
|
|
||||||
ignoreList,
|
|
||||||
};
|
|
||||||
return presortedDecodedMap(joined);
|
|
||||||
};
|
|
||||||
function parse(map) {
|
|
||||||
return typeof map === 'string' ? JSON.parse(map) : map;
|
|
||||||
}
|
|
||||||
function recurse(input, mapUrl, mappings, sources, sourcesContent, names, ignoreList, lineOffset, columnOffset, stopLine, stopColumn) {
|
|
||||||
const { sections } = input;
|
|
||||||
for (let i = 0; i < sections.length; i++) {
|
|
||||||
const { map, offset } = sections[i];
|
|
||||||
let sl = stopLine;
|
|
||||||
let sc = stopColumn;
|
|
||||||
if (i + 1 < sections.length) {
|
|
||||||
const nextOffset = sections[i + 1].offset;
|
|
||||||
sl = Math.min(stopLine, lineOffset + nextOffset.line);
|
|
||||||
if (sl === stopLine) {
|
|
||||||
sc = Math.min(stopColumn, columnOffset + nextOffset.column);
|
|
||||||
}
|
|
||||||
else if (sl < stopLine) {
|
|
||||||
sc = columnOffset + nextOffset.column;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
addSection(map, mapUrl, mappings, sources, sourcesContent, names, ignoreList, lineOffset + offset.line, columnOffset + offset.column, sl, sc);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
function addSection(input, mapUrl, mappings, sources, sourcesContent, names, ignoreList, lineOffset, columnOffset, stopLine, stopColumn) {
|
|
||||||
const parsed = parse(input);
|
|
||||||
if ('sections' in parsed)
|
|
||||||
return recurse(...arguments);
|
|
||||||
const map = new TraceMap(parsed, mapUrl);
|
|
||||||
const sourcesOffset = sources.length;
|
|
||||||
const namesOffset = names.length;
|
|
||||||
const decoded = decodedMappings(map);
|
|
||||||
const { resolvedSources, sourcesContent: contents, ignoreList: ignores } = map;
|
|
||||||
append(sources, resolvedSources);
|
|
||||||
append(names, map.names);
|
|
||||||
if (contents)
|
|
||||||
append(sourcesContent, contents);
|
|
||||||
else
|
|
||||||
for (let i = 0; i < resolvedSources.length; i++)
|
|
||||||
sourcesContent.push(null);
|
|
||||||
if (ignores)
|
|
||||||
for (let i = 0; i < ignores.length; i++)
|
|
||||||
ignoreList.push(ignores[i] + sourcesOffset);
|
|
||||||
for (let i = 0; i < decoded.length; i++) {
|
|
||||||
const lineI = lineOffset + i;
|
|
||||||
// We can only add so many lines before we step into the range that the next section's map
|
|
||||||
// controls. When we get to the last line, then we'll start checking the segments to see if
|
|
||||||
// they've crossed into the column range. But it may not have any columns that overstep, so we
|
|
||||||
// still need to check that we don't overstep lines, too.
|
|
||||||
if (lineI > stopLine)
|
|
||||||
return;
|
|
||||||
// The out line may already exist in mappings (if we're continuing the line started by a
|
|
||||||
// previous section). Or, we may have jumped ahead several lines to start this section.
|
|
||||||
const out = getLine(mappings, lineI);
|
|
||||||
// On the 0th loop, the section's column offset shifts us forward. On all other lines (since the
|
|
||||||
// map can be multiple lines), it doesn't.
|
|
||||||
const cOffset = i === 0 ? columnOffset : 0;
|
|
||||||
const line = decoded[i];
|
|
||||||
for (let j = 0; j < line.length; j++) {
|
|
||||||
const seg = line[j];
|
|
||||||
const column = cOffset + seg[COLUMN];
|
|
||||||
// If this segment steps into the column range that the next section's map controls, we need
|
|
||||||
// to stop early.
|
|
||||||
if (lineI === stopLine && column >= stopColumn)
|
|
||||||
return;
|
|
||||||
if (seg.length === 1) {
|
|
||||||
out.push([column]);
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
const sourcesIndex = sourcesOffset + seg[SOURCES_INDEX];
|
|
||||||
const sourceLine = seg[SOURCE_LINE];
|
|
||||||
const sourceColumn = seg[SOURCE_COLUMN];
|
|
||||||
out.push(seg.length === 4
|
|
||||||
? [column, sourcesIndex, sourceLine, sourceColumn]
|
|
||||||
: [column, sourcesIndex, sourceLine, sourceColumn, namesOffset + seg[NAMES_INDEX]]);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
function append(arr, other) {
|
|
||||||
for (let i = 0; i < other.length; i++)
|
|
||||||
arr.push(other[i]);
|
|
||||||
}
|
|
||||||
function getLine(arr, index) {
|
|
||||||
for (let i = arr.length; i <= index; i++)
|
|
||||||
arr[i] = [];
|
|
||||||
return arr[index];
|
|
||||||
}
|
|
||||||
|
|
||||||
const LINE_GTR_ZERO = '`line` must be greater than 0 (lines start at line 1)';
|
|
||||||
const COL_GTR_EQ_ZERO = '`column` must be greater than or equal to 0 (columns start at column 0)';
|
|
||||||
const LEAST_UPPER_BOUND = -1;
|
|
||||||
const GREATEST_LOWER_BOUND = 1;
|
|
||||||
class TraceMap {
|
|
||||||
constructor(map, mapUrl) {
|
|
||||||
const isString = typeof map === 'string';
|
|
||||||
if (!isString && map._decodedMemo)
|
|
||||||
return map;
|
|
||||||
const parsed = (isString ? JSON.parse(map) : map);
|
|
||||||
const { version, file, names, sourceRoot, sources, sourcesContent } = parsed;
|
|
||||||
this.version = version;
|
|
||||||
this.file = file;
|
|
||||||
this.names = names || [];
|
|
||||||
this.sourceRoot = sourceRoot;
|
|
||||||
this.sources = sources;
|
|
||||||
this.sourcesContent = sourcesContent;
|
|
||||||
this.ignoreList = parsed.ignoreList || parsed.x_google_ignoreList || undefined;
|
|
||||||
const from = resolve(sourceRoot || '', stripFilename(mapUrl));
|
|
||||||
this.resolvedSources = sources.map((s) => resolve(s || '', from));
|
|
||||||
const { mappings } = parsed;
|
|
||||||
if (typeof mappings === 'string') {
|
|
||||||
this._encoded = mappings;
|
|
||||||
this._decoded = undefined;
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
this._encoded = undefined;
|
|
||||||
this._decoded = maybeSort(mappings, isString);
|
|
||||||
}
|
|
||||||
this._decodedMemo = memoizedState();
|
|
||||||
this._bySources = undefined;
|
|
||||||
this._bySourceMemos = undefined;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Typescript doesn't allow friend access to private fields, so this just casts the map into a type
|
|
||||||
* with public access modifiers.
|
|
||||||
*/
|
|
||||||
function cast(map) {
|
|
||||||
return map;
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Returns the encoded (VLQ string) form of the SourceMap's mappings field.
|
|
||||||
*/
|
|
||||||
function encodedMappings(map) {
|
|
||||||
var _a;
|
|
||||||
var _b;
|
|
||||||
return ((_a = (_b = cast(map))._encoded) !== null && _a !== void 0 ? _a : (_b._encoded = encode(cast(map)._decoded)));
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Returns the decoded (array of lines of segments) form of the SourceMap's mappings field.
|
|
||||||
*/
|
|
||||||
function decodedMappings(map) {
|
|
||||||
var _a;
|
|
||||||
return ((_a = cast(map))._decoded || (_a._decoded = decode(cast(map)._encoded)));
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* A low-level API to find the segment associated with a generated line/column (think, from a
|
|
||||||
* stack trace). Line and column here are 0-based, unlike `originalPositionFor`.
|
|
||||||
*/
|
|
||||||
function traceSegment(map, line, column) {
|
|
||||||
const decoded = decodedMappings(map);
|
|
||||||
// It's common for parent source maps to have pointers to lines that have no
|
|
||||||
// mapping (like a "//# sourceMappingURL=") at the end of the child file.
|
|
||||||
if (line >= decoded.length)
|
|
||||||
return null;
|
|
||||||
const segments = decoded[line];
|
|
||||||
const index = traceSegmentInternal(segments, cast(map)._decodedMemo, line, column, GREATEST_LOWER_BOUND);
|
|
||||||
return index === -1 ? null : segments[index];
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* A higher-level API to find the source/line/column associated with a generated line/column
|
|
||||||
* (think, from a stack trace). Line is 1-based, but column is 0-based, due to legacy behavior in
|
|
||||||
* `source-map` library.
|
|
||||||
*/
|
|
||||||
function originalPositionFor(map, needle) {
|
|
||||||
let { line, column, bias } = needle;
|
|
||||||
line--;
|
|
||||||
if (line < 0)
|
|
||||||
throw new Error(LINE_GTR_ZERO);
|
|
||||||
if (column < 0)
|
|
||||||
throw new Error(COL_GTR_EQ_ZERO);
|
|
||||||
const decoded = decodedMappings(map);
|
|
||||||
// It's common for parent source maps to have pointers to lines that have no
|
|
||||||
// mapping (like a "//# sourceMappingURL=") at the end of the child file.
|
|
||||||
if (line >= decoded.length)
|
|
||||||
return OMapping(null, null, null, null);
|
|
||||||
const segments = decoded[line];
|
|
||||||
const index = traceSegmentInternal(segments, cast(map)._decodedMemo, line, column, bias || GREATEST_LOWER_BOUND);
|
|
||||||
if (index === -1)
|
|
||||||
return OMapping(null, null, null, null);
|
|
||||||
const segment = segments[index];
|
|
||||||
if (segment.length === 1)
|
|
||||||
return OMapping(null, null, null, null);
|
|
||||||
const { names, resolvedSources } = map;
|
|
||||||
return OMapping(resolvedSources[segment[SOURCES_INDEX]], segment[SOURCE_LINE] + 1, segment[SOURCE_COLUMN], segment.length === 5 ? names[segment[NAMES_INDEX]] : null);
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Finds the generated line/column position of the provided source/line/column source position.
|
|
||||||
*/
|
|
||||||
function generatedPositionFor(map, needle) {
|
|
||||||
const { source, line, column, bias } = needle;
|
|
||||||
return generatedPosition(map, source, line, column, bias || GREATEST_LOWER_BOUND, false);
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Finds all generated line/column positions of the provided source/line/column source position.
|
|
||||||
*/
|
|
||||||
function allGeneratedPositionsFor(map, needle) {
|
|
||||||
const { source, line, column, bias } = needle;
|
|
||||||
// SourceMapConsumer uses LEAST_UPPER_BOUND for some reason, so we follow suit.
|
|
||||||
return generatedPosition(map, source, line, column, bias || LEAST_UPPER_BOUND, true);
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Iterates each mapping in generated position order.
|
|
||||||
*/
|
|
||||||
function eachMapping(map, cb) {
|
|
||||||
const decoded = decodedMappings(map);
|
|
||||||
const { names, resolvedSources } = map;
|
|
||||||
for (let i = 0; i < decoded.length; i++) {
|
|
||||||
const line = decoded[i];
|
|
||||||
for (let j = 0; j < line.length; j++) {
|
|
||||||
const seg = line[j];
|
|
||||||
const generatedLine = i + 1;
|
|
||||||
const generatedColumn = seg[0];
|
|
||||||
let source = null;
|
|
||||||
let originalLine = null;
|
|
||||||
let originalColumn = null;
|
|
||||||
let name = null;
|
|
||||||
if (seg.length !== 1) {
|
|
||||||
source = resolvedSources[seg[1]];
|
|
||||||
originalLine = seg[2] + 1;
|
|
||||||
originalColumn = seg[3];
|
|
||||||
}
|
|
||||||
if (seg.length === 5)
|
|
||||||
name = names[seg[4]];
|
|
||||||
cb({
|
|
||||||
generatedLine,
|
|
||||||
generatedColumn,
|
|
||||||
source,
|
|
||||||
originalLine,
|
|
||||||
originalColumn,
|
|
||||||
name,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
function sourceIndex(map, source) {
|
|
||||||
const { sources, resolvedSources } = map;
|
|
||||||
let index = sources.indexOf(source);
|
|
||||||
if (index === -1)
|
|
||||||
index = resolvedSources.indexOf(source);
|
|
||||||
return index;
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Retrieves the source content for a particular source, if its found. Returns null if not.
|
|
||||||
*/
|
|
||||||
function sourceContentFor(map, source) {
|
|
||||||
const { sourcesContent } = map;
|
|
||||||
if (sourcesContent == null)
|
|
||||||
return null;
|
|
||||||
const index = sourceIndex(map, source);
|
|
||||||
return index === -1 ? null : sourcesContent[index];
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Determines if the source is marked to ignore by the source map.
|
|
||||||
*/
|
|
||||||
function isIgnored(map, source) {
|
|
||||||
const { ignoreList } = map;
|
|
||||||
if (ignoreList == null)
|
|
||||||
return false;
|
|
||||||
const index = sourceIndex(map, source);
|
|
||||||
return index === -1 ? false : ignoreList.includes(index);
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* A helper that skips sorting of the input map's mappings array, which can be expensive for larger
|
|
||||||
* maps.
|
|
||||||
*/
|
|
||||||
function presortedDecodedMap(map, mapUrl) {
|
|
||||||
const tracer = new TraceMap(clone(map, []), mapUrl);
|
|
||||||
cast(tracer)._decoded = map.mappings;
|
|
||||||
return tracer;
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
|
|
||||||
* a sourcemap, or to JSON.stringify.
|
|
||||||
*/
|
|
||||||
function decodedMap(map) {
|
|
||||||
return clone(map, decodedMappings(map));
|
|
||||||
}
|
|
||||||
/**
|
|
||||||
* Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
|
|
||||||
* a sourcemap, or to JSON.stringify.
|
|
||||||
*/
|
|
||||||
function encodedMap(map) {
|
|
||||||
return clone(map, encodedMappings(map));
|
|
||||||
}
|
|
||||||
function clone(map, mappings) {
|
|
||||||
return {
|
|
||||||
version: map.version,
|
|
||||||
file: map.file,
|
|
||||||
names: map.names,
|
|
||||||
sourceRoot: map.sourceRoot,
|
|
||||||
sources: map.sources,
|
|
||||||
sourcesContent: map.sourcesContent,
|
|
||||||
mappings,
|
|
||||||
ignoreList: map.ignoreList || map.x_google_ignoreList,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
function OMapping(source, line, column, name) {
|
|
||||||
return { source, line, column, name };
|
|
||||||
}
|
|
||||||
function GMapping(line, column) {
|
|
||||||
return { line, column };
|
|
||||||
}
|
|
||||||
function traceSegmentInternal(segments, memo, line, column, bias) {
|
|
||||||
let index = memoizedBinarySearch(segments, column, memo, line);
|
|
||||||
if (found) {
|
|
||||||
index = (bias === LEAST_UPPER_BOUND ? upperBound : lowerBound)(segments, column, index);
|
|
||||||
}
|
|
||||||
else if (bias === LEAST_UPPER_BOUND)
|
|
||||||
index++;
|
|
||||||
if (index === -1 || index === segments.length)
|
|
||||||
return -1;
|
|
||||||
return index;
|
|
||||||
}
|
|
||||||
function sliceGeneratedPositions(segments, memo, line, column, bias) {
|
|
||||||
let min = traceSegmentInternal(segments, memo, line, column, GREATEST_LOWER_BOUND);
|
|
||||||
// We ignored the bias when tracing the segment so that we're guarnateed to find the first (in
|
|
||||||
// insertion order) segment that matched. Even if we did respect the bias when tracing, we would
|
|
||||||
// still need to call `lowerBound()` to find the first segment, which is slower than just looking
|
|
||||||
// for the GREATEST_LOWER_BOUND to begin with. The only difference that matters for us is when the
|
|
||||||
// binary search didn't match, in which case GREATEST_LOWER_BOUND just needs to increment to
|
|
||||||
// match LEAST_UPPER_BOUND.
|
|
||||||
if (!found && bias === LEAST_UPPER_BOUND)
|
|
||||||
min++;
|
|
||||||
if (min === -1 || min === segments.length)
|
|
||||||
return [];
|
|
||||||
// We may have found the segment that started at an earlier column. If this is the case, then we
|
|
||||||
// need to slice all generated segments that match _that_ column, because all such segments span
|
|
||||||
// to our desired column.
|
|
||||||
const matchedColumn = found ? column : segments[min][COLUMN];
|
|
||||||
// The binary search is not guaranteed to find the lower bound when a match wasn't found.
|
|
||||||
if (!found)
|
|
||||||
min = lowerBound(segments, matchedColumn, min);
|
|
||||||
const max = upperBound(segments, matchedColumn, min);
|
|
||||||
const result = [];
|
|
||||||
for (; min <= max; min++) {
|
|
||||||
const segment = segments[min];
|
|
||||||
result.push(GMapping(segment[REV_GENERATED_LINE] + 1, segment[REV_GENERATED_COLUMN]));
|
|
||||||
}
|
|
||||||
return result;
|
|
||||||
}
|
|
||||||
function generatedPosition(map, source, line, column, bias, all) {
|
|
||||||
var _a;
|
|
||||||
line--;
|
|
||||||
if (line < 0)
|
|
||||||
throw new Error(LINE_GTR_ZERO);
|
|
||||||
if (column < 0)
|
|
||||||
throw new Error(COL_GTR_EQ_ZERO);
|
|
||||||
const { sources, resolvedSources } = map;
|
|
||||||
let sourceIndex = sources.indexOf(source);
|
|
||||||
if (sourceIndex === -1)
|
|
||||||
sourceIndex = resolvedSources.indexOf(source);
|
|
||||||
if (sourceIndex === -1)
|
|
||||||
return all ? [] : GMapping(null, null);
|
|
||||||
const generated = ((_a = cast(map))._bySources || (_a._bySources = buildBySources(decodedMappings(map), (cast(map)._bySourceMemos = sources.map(memoizedState)))));
|
|
||||||
const segments = generated[sourceIndex][line];
|
|
||||||
if (segments == null)
|
|
||||||
return all ? [] : GMapping(null, null);
|
|
||||||
const memo = cast(map)._bySourceMemos[sourceIndex];
|
|
||||||
if (all)
|
|
||||||
return sliceGeneratedPositions(segments, memo, line, column, bias);
|
|
||||||
const index = traceSegmentInternal(segments, memo, line, column, bias);
|
|
||||||
if (index === -1)
|
|
||||||
return GMapping(null, null);
|
|
||||||
const segment = segments[index];
|
|
||||||
return GMapping(segment[REV_GENERATED_LINE] + 1, segment[REV_GENERATED_COLUMN]);
|
|
||||||
}
|
|
||||||
|
|
||||||
export { AnyMap, GREATEST_LOWER_BOUND, LEAST_UPPER_BOUND, TraceMap, allGeneratedPositionsFor, decodedMap, decodedMappings, eachMapping, encodedMap, encodedMappings, generatedPositionFor, isIgnored, originalPositionFor, presortedDecodedMap, sourceContentFor, traceSegment };
|
|
||||||
//# sourceMappingURL=trace-mapping.mjs.map
|
|
||||||
1
node_modules/@jridgewell/trace-mapping/dist/trace-mapping.mjs.map
generated
vendored
1
node_modules/@jridgewell/trace-mapping/dist/trace-mapping.mjs.map
generated
vendored
File diff suppressed because one or more lines are too long
600
node_modules/@jridgewell/trace-mapping/dist/trace-mapping.umd.js
generated
vendored
600
node_modules/@jridgewell/trace-mapping/dist/trace-mapping.umd.js
generated
vendored
@ -1,600 +0,0 @@
|
|||||||
(function (global, factory) {
|
|
||||||
typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports, require('@jridgewell/sourcemap-codec'), require('@jridgewell/resolve-uri')) :
|
|
||||||
typeof define === 'function' && define.amd ? define(['exports', '@jridgewell/sourcemap-codec', '@jridgewell/resolve-uri'], factory) :
|
|
||||||
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.traceMapping = {}, global.sourcemapCodec, global.resolveURI));
|
|
||||||
})(this, (function (exports, sourcemapCodec, resolveUri) { 'use strict';
|
|
||||||
|
|
||||||
function resolve(input, base) {
|
|
||||||
// The base is always treated as a directory, if it's not empty.
|
|
||||||
// https://github.com/mozilla/source-map/blob/8cb3ee57/lib/util.js#L327
|
|
||||||
// https://github.com/chromium/chromium/blob/da4adbb3/third_party/blink/renderer/devtools/front_end/sdk/SourceMap.js#L400-L401
|
|
||||||
if (base && !base.endsWith('/'))
|
|
||||||
base += '/';
|
|
||||||
return resolveUri(input, base);
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Removes everything after the last "/", but leaves the slash.
|
|
||||||
*/
|
|
||||||
function stripFilename(path) {
|
|
||||||
if (!path)
|
|
||||||
return '';
|
|
||||||
const index = path.lastIndexOf('/');
|
|
||||||
return path.slice(0, index + 1);
|
|
||||||
}
|
|
||||||
|
|
||||||
const COLUMN = 0;
|
|
||||||
const SOURCES_INDEX = 1;
|
|
||||||
const SOURCE_LINE = 2;
|
|
||||||
const SOURCE_COLUMN = 3;
|
|
||||||
const NAMES_INDEX = 4;
|
|
||||||
const REV_GENERATED_LINE = 1;
|
|
||||||
const REV_GENERATED_COLUMN = 2;
|
|
||||||
|
|
||||||
function maybeSort(mappings, owned) {
|
|
||||||
const unsortedIndex = nextUnsortedSegmentLine(mappings, 0);
|
|
||||||
if (unsortedIndex === mappings.length)
|
|
||||||
return mappings;
|
|
||||||
// If we own the array (meaning we parsed it from JSON), then we're free to directly mutate it. If
|
|
||||||
// not, we do not want to modify the consumer's input array.
|
|
||||||
if (!owned)
|
|
||||||
mappings = mappings.slice();
|
|
||||||
for (let i = unsortedIndex; i < mappings.length; i = nextUnsortedSegmentLine(mappings, i + 1)) {
|
|
||||||
mappings[i] = sortSegments(mappings[i], owned);
|
|
||||||
}
|
|
||||||
return mappings;
|
|
||||||
}
|
|
||||||
function nextUnsortedSegmentLine(mappings, start) {
|
|
||||||
for (let i = start; i < mappings.length; i++) {
|
|
||||||
if (!isSorted(mappings[i]))
|
|
||||||
return i;
|
|
||||||
}
|
|
||||||
return mappings.length;
|
|
||||||
}
|
|
||||||
function isSorted(line) {
|
|
||||||
for (let j = 1; j < line.length; j++) {
|
|
||||||
if (line[j][COLUMN] < line[j - 1][COLUMN]) {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
function sortSegments(line, owned) {
|
|
||||||
if (!owned)
|
|
||||||
line = line.slice();
|
|
||||||
return line.sort(sortComparator);
|
|
||||||
}
|
|
||||||
function sortComparator(a, b) {
|
|
||||||
return a[COLUMN] - b[COLUMN];
|
|
||||||
}
|
|
||||||
|
|
||||||
let found = false;
|
|
||||||
/**
|
|
||||||
* A binary search implementation that returns the index if a match is found.
|
|
||||||
* If no match is found, then the left-index (the index associated with the item that comes just
|
|
||||||
* before the desired index) is returned. To maintain proper sort order, a splice would happen at
|
|
||||||
* the next index:
|
|
||||||
*
|
|
||||||
* ```js
|
|
||||||
* const array = [1, 3];
|
|
||||||
* const needle = 2;
|
|
||||||
* const index = binarySearch(array, needle, (item, needle) => item - needle);
|
|
||||||
*
|
|
||||||
* assert.equal(index, 0);
|
|
||||||
* array.splice(index + 1, 0, needle);
|
|
||||||
* assert.deepEqual(array, [1, 2, 3]);
|
|
||||||
* ```
|
|
||||||
*/
|
|
||||||
function binarySearch(haystack, needle, low, high) {
|
|
||||||
while (low <= high) {
|
|
||||||
const mid = low + ((high - low) >> 1);
|
|
||||||
const cmp = haystack[mid][COLUMN] - needle;
|
|
||||||
if (cmp === 0) {
|
|
||||||
found = true;
|
|
||||||
return mid;
|
|
||||||
}
|
|
||||||
if (cmp < 0) {
|
|
||||||
low = mid + 1;
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
high = mid - 1;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
found = false;
|
|
||||||
return low - 1;
|
|
||||||
}
|
|
||||||
function upperBound(haystack, needle, index) {
|
|
||||||
for (let i = index + 1; i < haystack.length; index = i++) {
|
|
||||||
if (haystack[i][COLUMN] !== needle)
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
return index;
|
|
||||||
}
|
|
||||||
function lowerBound(haystack, needle, index) {
|
|
||||||
for (let i = index - 1; i >= 0; index = i--) {
|
|
||||||
if (haystack[i][COLUMN] !== needle)
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
return index;
|
|
||||||
}
|
|
||||||
function memoizedState() {
|
|
||||||
return {
|
|
||||||
lastKey: -1,
|
|
||||||
lastNeedle: -1,
|
|
||||||
lastIndex: -1,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
/**
 * This overly complicated beast is just to record the last tested line/column and the resulting
 * index, allowing us to skip a few tests if mappings are monotonically increasing.
 *
 * `state` is the per-map memo (see memoizedState); `key` identifies the
 * haystack (e.g. the line) the memo applies to.
 */
function memoizedBinarySearch(haystack, needle, state, key) {
    const { lastKey, lastNeedle, lastIndex } = state;
    let low = 0;
    let high = haystack.length - 1;
    if (key === lastKey) {
        if (needle === lastNeedle) {
            // Exact repeat of the previous query: refresh `found` and reuse the index.
            found = lastIndex !== -1 && haystack[lastIndex][COLUMN] === needle;
            return lastIndex;
        }
        if (needle >= lastNeedle) {
            // lastIndex may be -1 if the previous needle was not found.
            low = lastIndex === -1 ? 0 : lastIndex;
        }
        else {
            high = lastIndex;
        }
    }
    state.lastKey = key;
    state.lastNeedle = needle;
    return (state.lastIndex = binarySearch(haystack, needle, low, high));
}
|
|
||||||
|
|
||||||
// Rebuilds the original source files, with mappings that are ordered by source line/column instead
// of generated line/column.
function buildBySources(decoded, memos) {
    // One null-prototype sparse "array" of lines per source file.
    const sources = memos.map(buildNullArray);
    for (let i = 0; i < decoded.length; i++) {
        const line = decoded[i];
        for (let j = 0; j < line.length; j++) {
            const seg = line[j];
            // A 1-tuple segment carries no source information; nothing to reverse.
            if (seg.length === 1)
                continue;
            const sourceIndex = seg[SOURCES_INDEX];
            const sourceLine = seg[SOURCE_LINE];
            const sourceColumn = seg[SOURCE_COLUMN];
            const originalSource = sources[sourceIndex];
            const originalLine = (originalSource[sourceLine] || (originalSource[sourceLine] = []));
            const memo = memos[sourceIndex];
            // The binary search either found a match, or it found the left-index just before where the
            // segment should go. Either way, we want to insert after that. And there may be multiple
            // generated segments associated with an original location, so there may need to move several
            // indexes before we find where we need to insert.
            let index = upperBound(originalLine, sourceColumn, memoizedBinarySearch(originalLine, sourceColumn, memo, sourceLine));
            memo.lastIndex = ++index;
            // Reverse segment layout: [sourceColumn, generatedLine, generatedColumn].
            insert(originalLine, index, [sourceColumn, i, seg[COLUMN]]);
        }
    }
    return sources;
}
|
|
||||||
// Inserts `value` at `index`, shifting trailing elements one slot to the right.
function insert(array, index, value) {
    let i = array.length;
    while (i > index) {
        array[i] = array[i - 1];
        i--;
    }
    array[index] = value;
}
|
|
||||||
// Null arrays allow us to use ordered index keys without actually allocating contiguous memory like
// a real array. We use a null-prototype object to avoid prototype pollution and deoptimizations.
// Numeric properties on objects are magically sorted in ascending order by the engine regardless of
// the insertion order. So, by setting any numeric keys, even out of order, we'll get ascending
// order when iterating with for-in.
function buildNullArray() {
    return Object.create(null);
}
|
|
||||||
|
|
||||||
// Builds a TraceMap from either a flat sourcemap or a sectioned ("index") map.
// Usable with or without `new` (see the AnyMap type declaration).
const AnyMap = function (map, mapUrl) {
    const parsed = parse(map);
    if (!('sections' in parsed)) {
        // Flat map: delegate directly to TraceMap.
        return new TraceMap(parsed, mapUrl);
    }
    // Sectioned map: flatten every section into one combined decoded map.
    const mappings = [];
    const sources = [];
    const sourcesContent = [];
    const names = [];
    const ignoreList = [];
    recurse(parsed, mapUrl, mappings, sources, sourcesContent, names, ignoreList, 0, 0, Infinity, Infinity);
    const joined = {
        version: 3,
        file: parsed.file,
        names,
        sources,
        sourcesContent,
        mappings,
        ignoreList,
    };
    // Flattening emits mappings in generated order, so the sort can be skipped.
    return presortedDecodedMap(joined);
};
|
|
||||||
// Accepts either a JSON string or an already-parsed sourcemap object.
function parse(map) {
    if (typeof map === 'string')
        return JSON.parse(map);
    return map;
}
|
|
||||||
// Flattens a sectioned map by visiting each section in order and appending its
// mappings. `stopLine`/`stopColumn` bound how far a section may emit before the
// next section's map takes over.
function recurse(input, mapUrl, mappings, sources, sourcesContent, names, ignoreList, lineOffset, columnOffset, stopLine, stopColumn) {
    const { sections } = input;
    for (let i = 0; i < sections.length; i++) {
        const { map, offset } = sections[i];
        let sl = stopLine;
        let sc = stopColumn;
        if (i + 1 < sections.length) {
            // Clamp this section's extent to where the next section begins.
            const nextOffset = sections[i + 1].offset;
            sl = Math.min(stopLine, lineOffset + nextOffset.line);
            if (sl === stopLine) {
                // Same stop line: the tighter of the two column bounds wins.
                sc = Math.min(stopColumn, columnOffset + nextOffset.column);
            }
            else if (sl < stopLine) {
                sc = columnOffset + nextOffset.column;
            }
        }
        addSection(map, mapUrl, mappings, sources, sourcesContent, names, ignoreList, lineOffset + offset.line, columnOffset + offset.column, sl, sc);
    }
}
|
|
||||||
// Appends one section's map (which may itself be sectioned) into the combined
// output arrays, rebasing line/column positions and source/name indices.
function addSection(input, mapUrl, mappings, sources, sourcesContent, names, ignoreList, lineOffset, columnOffset, stopLine, stopColumn) {
    const parsed = parse(input);
    // Nested sectioned map: recurse with the exact same argument list.
    if ('sections' in parsed)
        return recurse(...arguments);
    const map = new TraceMap(parsed, mapUrl);
    // Offsets used to rebase this section's indices into the joined arrays.
    const sourcesOffset = sources.length;
    const namesOffset = names.length;
    const decoded = decodedMappings(map);
    const { resolvedSources, sourcesContent: contents, ignoreList: ignores } = map;
    append(sources, resolvedSources);
    append(names, map.names);
    if (contents)
        append(sourcesContent, contents);
    else
        // Keep sourcesContent aligned with sources even when content is missing.
        for (let i = 0; i < resolvedSources.length; i++)
            sourcesContent.push(null);
    if (ignores)
        for (let i = 0; i < ignores.length; i++)
            ignoreList.push(ignores[i] + sourcesOffset);
    for (let i = 0; i < decoded.length; i++) {
        const lineI = lineOffset + i;
        // We can only add so many lines before we step into the range that the next section's map
        // controls. When we get to the last line, then we'll start checking the segments to see if
        // they've crossed into the column range. But it may not have any columns that overstep, so we
        // still need to check that we don't overstep lines, too.
        if (lineI > stopLine)
            return;
        // The out line may already exist in mappings (if we're continuing the line started by a
        // previous section). Or, we may have jumped ahead several lines to start this section.
        const out = getLine(mappings, lineI);
        // On the 0th loop, the section's column offset shifts us forward. On all other lines (since the
        // map can be multiple lines), it doesn't.
        const cOffset = i === 0 ? columnOffset : 0;
        const line = decoded[i];
        for (let j = 0; j < line.length; j++) {
            const seg = line[j];
            const column = cOffset + seg[COLUMN];
            // If this segment steps into the column range that the next section's map controls, we need
            // to stop early.
            if (lineI === stopLine && column >= stopColumn)
                return;
            if (seg.length === 1) {
                // Sourceless segment: only the (rebased) generated column survives.
                out.push([column]);
                continue;
            }
            const sourcesIndex = sourcesOffset + seg[SOURCES_INDEX];
            const sourceLine = seg[SOURCE_LINE];
            const sourceColumn = seg[SOURCE_COLUMN];
            out.push(seg.length === 4
                ? [column, sourcesIndex, sourceLine, sourceColumn]
                : [column, sourcesIndex, sourceLine, sourceColumn, namesOffset + seg[NAMES_INDEX]]);
        }
    }
}
|
|
||||||
// Pushes every element of `other` onto `arr` (concat without a new allocation).
function append(arr, other) {
    for (const item of other)
        arr.push(item);
}
|
|
||||||
// Returns arr[index], growing arr with fresh empty lines as needed.
function getLine(arr, index) {
    while (arr.length <= index)
        arr.push([]);
    return arr[index];
}
|
|
||||||
|
|
||||||
// Error messages for the 1-based line / 0-based column validation in the
// position-query APIs.
const LINE_GTR_ZERO = '`line` must be greater than 0 (lines start at line 1)';
const COL_GTR_EQ_ZERO = '`column` must be greater than or equal to 0 (columns start at column 0)';
// Bias values accepted by position queries (compatible with `source-map`).
const LEAST_UPPER_BOUND = -1;
const GREATEST_LOWER_BOUND = 1;
|
|
||||||
class TraceMap {
    /**
     * @param map JSON string, parsed sourcemap object, or an existing TraceMap.
     * @param mapUrl URL of the map file, used to resolve relative source paths.
     */
    constructor(map, mapUrl) {
        const isString = typeof map === 'string';
        // Already a TraceMap (detected via its memo field)? Reuse it as-is.
        if (!isString && map._decodedMemo)
            return map;
        const parsed = (isString ? JSON.parse(map) : map);
        const { version, file, names, sourceRoot, sources, sourcesContent } = parsed;
        this.version = version;
        this.file = file;
        this.names = names || [];
        this.sourceRoot = sourceRoot;
        this.sources = sources;
        this.sourcesContent = sourcesContent;
        // `x_google_ignoreList` is the legacy spelling of `ignoreList`.
        this.ignoreList = parsed.ignoreList || parsed.x_google_ignoreList || undefined;
        const from = resolve(sourceRoot || '', stripFilename(mapUrl));
        this.resolvedSources = sources.map((s) => resolve(s || '', from));
        const { mappings } = parsed;
        if (typeof mappings === 'string') {
            // Encoded (VLQ) mappings: keep the string, decode lazily on first use.
            this._encoded = mappings;
            this._decoded = undefined;
        }
        else {
            this._encoded = undefined;
            // `isString` means the array came from our own JSON.parse, so it is
            // owned and maybeSort may mutate it in place.
            this._decoded = maybeSort(mappings, isString);
        }
        // Lazily-populated lookup caches.
        this._decodedMemo = memoizedState();
        this._bySources = undefined;
        this._bySourceMemos = undefined;
    }
}
|
|
||||||
/**
 * TypeScript doesn't allow friend access to private fields, so this cast
 * re-types the map with public access modifiers. Pure identity at runtime.
 */
function cast(map) {
    return map;
}
|
|
||||||
/**
 * Returns the encoded (VLQ string) form of the SourceMap's mappings field,
 * encoding and caching it from the decoded form on first use.
 */
function encodedMappings(map) {
    const internal = cast(map);
    if (internal._encoded == null) {
        internal._encoded = sourcemapCodec.encode(internal._decoded);
    }
    return internal._encoded;
}
|
|
||||||
/**
 * Returns the decoded (array of lines of segments) form of the SourceMap's
 * mappings field, decoding and caching it from the VLQ string on first use.
 */
function decodedMappings(map) {
    const internal = cast(map);
    if (!internal._decoded) {
        internal._decoded = sourcemapCodec.decode(internal._encoded);
    }
    return internal._decoded;
}
|
|
||||||
/**
 * A low-level API to find the segment associated with a generated line/column
 * (think, from a stack trace). Line and column here are 0-based, unlike
 * `originalPositionFor`.
 */
function traceSegment(map, line, column) {
    const decoded = decodedMappings(map);
    // Parent source maps commonly point at lines past the child's last mapping
    // (like a trailing "//# sourceMappingURL=" line).
    if (line >= decoded.length)
        return null;
    const segments = decoded[line];
    const hit = traceSegmentInternal(segments, cast(map)._decodedMemo, line, column, GREATEST_LOWER_BOUND);
    return hit === -1 ? null : segments[hit];
}
|
|
||||||
/**
 * A higher-level API to find the source/line/column associated with a generated
 * line/column (think, from a stack trace). Line is 1-based, but column is
 * 0-based, due to legacy behavior in the `source-map` library.
 */
function originalPositionFor(map, needle) {
    const { bias } = needle;
    const genLine = needle.line - 1;
    const genColumn = needle.column;
    if (genLine < 0)
        throw new Error(LINE_GTR_ZERO);
    if (genColumn < 0)
        throw new Error(COL_GTR_EQ_ZERO);
    const decoded = decodedMappings(map);
    // Parent maps often point past the end of the child's mappings.
    if (genLine >= decoded.length)
        return OMapping(null, null, null, null);
    const segments = decoded[genLine];
    const hit = traceSegmentInternal(segments, cast(map)._decodedMemo, genLine, genColumn, bias || GREATEST_LOWER_BOUND);
    if (hit === -1)
        return OMapping(null, null, null, null);
    const segment = segments[hit];
    // A 1-tuple segment carries no source information.
    if (segment.length === 1)
        return OMapping(null, null, null, null);
    const { names, resolvedSources } = map;
    return OMapping(resolvedSources[segment[SOURCES_INDEX]], segment[SOURCE_LINE] + 1, segment[SOURCE_COLUMN], segment.length === 5 ? names[segment[NAMES_INDEX]] : null);
}
|
|
||||||
/**
 * Finds the generated line/column position of the provided source/line/column
 * source position.
 */
function generatedPositionFor(map, needle) {
    const { source, line, column, bias } = needle;
    const chosenBias = bias || GREATEST_LOWER_BOUND;
    return generatedPosition(map, source, line, column, chosenBias, false);
}
|
|
||||||
/**
 * Finds all generated line/column positions of the provided source/line/column
 * source position. SourceMapConsumer defaults to LEAST_UPPER_BOUND here, so we
 * follow suit.
 */
function allGeneratedPositionsFor(map, needle) {
    const { source, line, column, bias } = needle;
    const chosenBias = bias || LEAST_UPPER_BOUND;
    return generatedPosition(map, source, line, column, chosenBias, true);
}
|
|
||||||
/**
 * Iterates each mapping in generated position order, invoking `cb` with a
 * `source-map`-style mapping object (1-based lines, 0-based columns).
 */
function eachMapping(map, cb) {
    const decoded = decodedMappings(map);
    const { names, resolvedSources } = map;
    for (let i = 0; i < decoded.length; i++) {
        const generatedLine = i + 1;
        for (const seg of decoded[i]) {
            const generatedColumn = seg[0];
            // Segments of length 1 carry no source info; length 5 adds a name.
            const hasSource = seg.length !== 1;
            cb({
                generatedLine,
                generatedColumn,
                source: hasSource ? resolvedSources[seg[1]] : null,
                originalLine: hasSource ? seg[2] + 1 : null,
                originalColumn: hasSource ? seg[3] : null,
                name: seg.length === 5 ? names[seg[4]] : null,
            });
        }
    }
}
|
|
||||||
// Locates `source` in the map, accepting either the raw or the resolved name.
function sourceIndex(map, source) {
    const { sources, resolvedSources } = map;
    const direct = sources.indexOf(source);
    return direct !== -1 ? direct : resolvedSources.indexOf(source);
}
|
|
||||||
/**
 * Retrieves the source content for a particular source, if it's found.
 * Returns null if not.
 */
function sourceContentFor(map, source) {
    const { sourcesContent } = map;
    if (sourcesContent == null)
        return null;
    const idx = sourceIndex(map, source);
    if (idx === -1)
        return null;
    return sourcesContent[idx];
}
|
|
||||||
/**
 * Determines if the source is marked to ignore by the source map.
 */
function isIgnored(map, source) {
    const { ignoreList } = map;
    if (ignoreList == null)
        return false;
    const idx = sourceIndex(map, source);
    return idx !== -1 && ignoreList.includes(idx);
}
|
|
||||||
/**
 * A helper that skips sorting of the input map's mappings array, which can be
 * expensive for larger maps. The caller guarantees the mappings are already in
 * generated order.
 */
function presortedDecodedMap(map, mapUrl) {
    // Construct with empty mappings, then splice the presorted ones in directly.
    const shell = clone(map, []);
    const tracer = new TraceMap(shell, mapUrl);
    cast(tracer)._decoded = map.mappings;
    return tracer;
}
|
|
||||||
/**
 * Returns a sourcemap object (with decoded mappings) suitable for passing to a
 * library that expects a sourcemap, or to JSON.stringify.
 */
function decodedMap(map) {
    const mappings = decodedMappings(map);
    return clone(map, mappings);
}
|
|
||||||
/**
 * Returns a sourcemap object (with encoded mappings) suitable for passing to a
 * library that expects a sourcemap, or to JSON.stringify.
 */
function encodedMap(map) {
    const mappings = encodedMappings(map);
    return clone(map, mappings);
}
|
|
||||||
// Shallow-copies the sourcemap fields, substituting the provided `mappings`
// and normalizing the legacy `x_google_ignoreList` name.
function clone(map, mappings) {
    const { version, file, names, sourceRoot, sources, sourcesContent } = map;
    return {
        version,
        file,
        names,
        sourceRoot,
        sources,
        sourcesContent,
        mappings,
        ignoreList: map.ignoreList || map.x_google_ignoreList,
    };
}
|
|
||||||
// Builds the object shape returned by originalPositionFor.
function OMapping(source, line, column, name) {
    const mapping = { source, line, column, name };
    return mapping;
}
|
|
||||||
// Builds the object shape returned by generatedPositionFor.
function GMapping(line, column) {
    const mapping = { line, column };
    return mapping;
}
|
|
||||||
// Shared search core: returns the index of the segment matching (line, column)
// under the given `bias`, or -1 when nothing qualifies. Relies on the
// module-level `found` flag set by memoizedBinarySearch.
function traceSegmentInternal(segments, memo, line, column, bias) {
    let index = memoizedBinarySearch(segments, column, memo, line);
    if (found) {
        // Exact column hit: multiple segments may share this column, so move to
        // the last (LEAST_UPPER_BOUND) or first (otherwise) of them.
        index = (bias === LEAST_UPPER_BOUND ? upperBound : lowerBound)(segments, column, index);
    }
    else if (bias === LEAST_UPPER_BOUND)
        // No exact hit: the search returned the left neighbor; step right.
        index++;
    if (index === -1 || index === segments.length)
        return -1;
    return index;
}
|
|
||||||
// Collects every generated position mapped from the matched original column
// (used by allGeneratedPositionsFor). Relies on the module-level `found` flag.
function sliceGeneratedPositions(segments, memo, line, column, bias) {
    let min = traceSegmentInternal(segments, memo, line, column, GREATEST_LOWER_BOUND);
    // We ignored the bias when tracing the segment so that we're guaranteed to find the first (in
    // insertion order) segment that matched. Even if we did respect the bias when tracing, we would
    // still need to call `lowerBound()` to find the first segment, which is slower than just looking
    // for the GREATEST_LOWER_BOUND to begin with. The only difference that matters for us is when the
    // binary search didn't match, in which case GREATEST_LOWER_BOUND just needs to increment to
    // match LEAST_UPPER_BOUND.
    if (!found && bias === LEAST_UPPER_BOUND)
        min++;
    if (min === -1 || min === segments.length)
        return [];
    // We may have found the segment that started at an earlier column. If this is the case, then we
    // need to slice all generated segments that match _that_ column, because all such segments span
    // to our desired column.
    const matchedColumn = found ? column : segments[min][COLUMN];
    // The binary search is not guaranteed to find the lower bound when a match wasn't found.
    if (!found)
        min = lowerBound(segments, matchedColumn, min);
    const max = upperBound(segments, matchedColumn, min);
    const result = [];
    // Convert each reverse segment back into a 1-based generated position.
    for (; min <= max; min++) {
        const segment = segments[min];
        result.push(GMapping(segment[REV_GENERATED_LINE] + 1, segment[REV_GENERATED_COLUMN]));
    }
    return result;
}
|
|
||||||
// Core reverse lookup: original (source, line, column) -> generated position.
// `all` selects between a single GMapping and every matching position.
function generatedPosition(map, source, line, column, bias, all) {
    var _a;
    line--; // external API is 1-based; internal arrays are 0-based
    if (line < 0)
        throw new Error(LINE_GTR_ZERO);
    if (column < 0)
        throw new Error(COL_GTR_EQ_ZERO);
    const { sources, resolvedSources } = map;
    // Accept either the raw or the resolved source name.
    let sourceIndex = sources.indexOf(source);
    if (sourceIndex === -1)
        sourceIndex = resolvedSources.indexOf(source);
    if (sourceIndex === -1)
        return all ? [] : GMapping(null, null);
    // Lazily build (and cache) the reverse, by-source index plus its memos.
    const generated = ((_a = cast(map))._bySources || (_a._bySources = buildBySources(decodedMappings(map), (cast(map)._bySourceMemos = sources.map(memoizedState)))));
    const segments = generated[sourceIndex][line];
    if (segments == null)
        return all ? [] : GMapping(null, null);
    const memo = cast(map)._bySourceMemos[sourceIndex];
    if (all)
        return sliceGeneratedPositions(segments, memo, line, column, bias);
    const index = traceSegmentInternal(segments, memo, line, column, bias);
    if (index === -1)
        return GMapping(null, null);
    const segment = segments[index];
    return GMapping(segment[REV_GENERATED_LINE] + 1, segment[REV_GENERATED_COLUMN]);
}
|
|
||||||
|
|
||||||
// Public API surface of the UMD bundle.
Object.assign(exports, {
    AnyMap,
    GREATEST_LOWER_BOUND,
    LEAST_UPPER_BOUND,
    TraceMap,
    allGeneratedPositionsFor,
    decodedMap,
    decodedMappings,
    eachMapping,
    encodedMap,
    encodedMappings,
    generatedPositionFor,
    isIgnored,
    originalPositionFor,
    presortedDecodedMap,
    sourceContentFor,
    traceSegment,
});
|
|
||||||
|
|
||||||
}));
|
|
||||||
//# sourceMappingURL=trace-mapping.umd.js.map
|
|
||||||
1
node_modules/@jridgewell/trace-mapping/dist/trace-mapping.umd.js.map
generated
vendored
1
node_modules/@jridgewell/trace-mapping/dist/trace-mapping.umd.js.map
generated
vendored
File diff suppressed because one or more lines are too long
8
node_modules/@jridgewell/trace-mapping/dist/types/any-map.d.ts
generated
vendored
8
node_modules/@jridgewell/trace-mapping/dist/types/any-map.d.ts
generated
vendored
@ -1,8 +0,0 @@
|
|||||||
import { TraceMap } from './trace-mapping';
import type { SectionedSourceMapInput } from './types';
// AnyMap is callable both with and without `new`; either form yields a TraceMap.
type AnyMap = {
    new (map: SectionedSourceMapInput, mapUrl?: string | null): TraceMap;
    (map: SectionedSourceMapInput, mapUrl?: string | null): TraceMap;
};
export declare const AnyMap: AnyMap;
export {};
|
|
||||||
32
node_modules/@jridgewell/trace-mapping/dist/types/binary-search.d.ts
generated
vendored
32
node_modules/@jridgewell/trace-mapping/dist/types/binary-search.d.ts
generated
vendored
@ -1,32 +0,0 @@
|
|||||||
import type { SourceMapSegment, ReverseSegment } from './sourcemap-segment';
// Memo of the last search, letting monotonically increasing queries narrow
// their search window (see memoizedBinarySearch).
export type MemoState = {
    lastKey: number;
    lastNeedle: number;
    lastIndex: number;
};
// Side channel: whether the most recent search found an exact match.
export declare let found: boolean;
/**
 * A binary search implementation that returns the index if a match is found.
 * If no match is found, then the left-index (the index associated with the item that comes just
 * before the desired index) is returned. To maintain proper sort order, a splice would happen at
 * the next index:
 *
 * ```js
 * const array = [1, 3];
 * const needle = 2;
 * const index = binarySearch(array, needle, (item, needle) => item - needle);
 *
 * assert.equal(index, 0);
 * array.splice(index + 1, 0, needle);
 * assert.deepEqual(array, [1, 2, 3]);
 * ```
 */
export declare function binarySearch(haystack: SourceMapSegment[] | ReverseSegment[], needle: number, low: number, high: number): number;
export declare function upperBound(haystack: SourceMapSegment[] | ReverseSegment[], needle: number, index: number): number;
export declare function lowerBound(haystack: SourceMapSegment[] | ReverseSegment[], needle: number, index: number): number;
export declare function memoizedState(): MemoState;
/**
 * This overly complicated beast is just to record the last tested line/column and the resulting
 * index, allowing us to skip a few tests if mappings are monotonically increasing.
 */
export declare function memoizedBinarySearch(haystack: SourceMapSegment[] | ReverseSegment[], needle: number, state: MemoState, key: number): number;
|
|
||||||
7
node_modules/@jridgewell/trace-mapping/dist/types/by-source.d.ts
generated
vendored
7
node_modules/@jridgewell/trace-mapping/dist/types/by-source.d.ts
generated
vendored
@ -1,7 +0,0 @@
|
|||||||
import type { ReverseSegment, SourceMapSegment } from './sourcemap-segment';
import type { MemoState } from './binary-search';
// Sparse, null-prototype map from original line number to its reverse segments.
export type Source = {
    __proto__: null;
    [line: number]: Exclude<ReverseSegment, [number]>[];
};
export default function buildBySources(decoded: readonly SourceMapSegment[][], memos: MemoState[]): Source[];
|
|
||||||
1
node_modules/@jridgewell/trace-mapping/dist/types/resolve.d.ts
generated
vendored
1
node_modules/@jridgewell/trace-mapping/dist/types/resolve.d.ts
generated
vendored
@ -1 +0,0 @@
|
|||||||
// Resolves `input` against `base`; NOTE(review): exact URL-resolution semantics live in the implementation — confirm there.
export default function resolve(input: string, base: string | undefined): string;
|
|
||||||
2
node_modules/@jridgewell/trace-mapping/dist/types/sort.d.ts
generated
vendored
2
node_modules/@jridgewell/trace-mapping/dist/types/sort.d.ts
generated
vendored
@ -1,2 +0,0 @@
|
|||||||
import type { SourceMapSegment } from './sourcemap-segment';
// Sorts mappings by generated column when needed; `owned` presumably permits
// an in-place sort of the caller's array — confirm against the implementation.
export default function maybeSort(mappings: SourceMapSegment[][], owned: boolean): SourceMapSegment[][];
|
|
||||||
16
node_modules/@jridgewell/trace-mapping/dist/types/sourcemap-segment.d.ts
generated
vendored
16
node_modules/@jridgewell/trace-mapping/dist/types/sourcemap-segment.d.ts
generated
vendored
@ -1,16 +0,0 @@
|
|||||||
type GeneratedColumn = number;
type SourcesIndex = number;
type SourceLine = number;
type SourceColumn = number;
type NamesIndex = number;
type GeneratedLine = number;
// Decoded mapping segment: 1-tuple (no source), 4-tuple, or 5-tuple (with name).
export type SourceMapSegment = [GeneratedColumn] | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn] | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn, NamesIndex];
// Reverse (by-source) segment produced by buildBySources.
export type ReverseSegment = [SourceColumn, GeneratedLine, GeneratedColumn];
// Tuple offsets into SourceMapSegment.
export declare const COLUMN = 0;
export declare const SOURCES_INDEX = 1;
export declare const SOURCE_LINE = 2;
export declare const SOURCE_COLUMN = 3;
export declare const NAMES_INDEX = 4;
// Tuple offsets into ReverseSegment.
export declare const REV_GENERATED_LINE = 1;
export declare const REV_GENERATED_COLUMN = 2;
export {};
|
|
||||||
4
node_modules/@jridgewell/trace-mapping/dist/types/strip-filename.d.ts
generated
vendored
4
node_modules/@jridgewell/trace-mapping/dist/types/strip-filename.d.ts
generated
vendored
@ -1,4 +0,0 @@
|
|||||||
/**
 * Removes everything after the last "/", but leaves the slash.
 */
export default function stripFilename(path: string | undefined | null): string;
|
|
||||||
79
node_modules/@jridgewell/trace-mapping/dist/types/trace-mapping.d.ts
generated
vendored
79
node_modules/@jridgewell/trace-mapping/dist/types/trace-mapping.d.ts
generated
vendored
@ -1,79 +0,0 @@
|
|||||||
import type { SourceMapSegment } from './sourcemap-segment';
import type { SourceMapV3, DecodedSourceMap, EncodedSourceMap, InvalidOriginalMapping, OriginalMapping, InvalidGeneratedMapping, GeneratedMapping, SourceMapInput, Needle, SourceNeedle, SourceMap, EachMapping } from './types';
export type { SourceMapSegment } from './sourcemap-segment';
export type { SourceMap, DecodedSourceMap, EncodedSourceMap, Section, SectionedSourceMap, SourceMapV3, Bias, EachMapping, GeneratedMapping, InvalidGeneratedMapping, InvalidOriginalMapping, Needle, OriginalMapping, OriginalMapping as Mapping, SectionedSourceMapInput, SourceMapInput, SourceNeedle, XInput, EncodedSourceMapXInput, DecodedSourceMapXInput, SectionedSourceMapXInput, SectionXInput, } from './types';
// Bias values for position queries (compatible with `source-map`).
export declare const LEAST_UPPER_BOUND = -1;
export declare const GREATEST_LOWER_BOUND = 1;
export { AnyMap } from './any-map';
export declare class TraceMap implements SourceMap {
    version: SourceMapV3['version'];
    file: SourceMapV3['file'];
    names: SourceMapV3['names'];
    sourceRoot: SourceMapV3['sourceRoot'];
    sources: SourceMapV3['sources'];
    sourcesContent: SourceMapV3['sourcesContent'];
    ignoreList: SourceMapV3['ignoreList'];
    resolvedSources: string[];
    private _encoded;
    private _decoded;
    private _decodedMemo;
    private _bySources;
    private _bySourceMemos;
    constructor(map: SourceMapInput, mapUrl?: string | null);
}
/**
 * Returns the encoded (VLQ string) form of the SourceMap's mappings field.
 */
export declare function encodedMappings(map: TraceMap): EncodedSourceMap['mappings'];
/**
 * Returns the decoded (array of lines of segments) form of the SourceMap's mappings field.
 */
export declare function decodedMappings(map: TraceMap): Readonly<DecodedSourceMap['mappings']>;
/**
 * A low-level API to find the segment associated with a generated line/column (think, from a
 * stack trace). Line and column here are 0-based, unlike `originalPositionFor`.
 */
export declare function traceSegment(map: TraceMap, line: number, column: number): Readonly<SourceMapSegment> | null;
/**
 * A higher-level API to find the source/line/column associated with a generated line/column
 * (think, from a stack trace). Line is 1-based, but column is 0-based, due to legacy behavior in
 * `source-map` library.
 */
export declare function originalPositionFor(map: TraceMap, needle: Needle): OriginalMapping | InvalidOriginalMapping;
/**
 * Finds the generated line/column position of the provided source/line/column source position.
 */
export declare function generatedPositionFor(map: TraceMap, needle: SourceNeedle): GeneratedMapping | InvalidGeneratedMapping;
/**
 * Finds all generated line/column positions of the provided source/line/column source position.
 */
export declare function allGeneratedPositionsFor(map: TraceMap, needle: SourceNeedle): GeneratedMapping[];
/**
 * Iterates each mapping in generated position order.
 */
export declare function eachMapping(map: TraceMap, cb: (mapping: EachMapping) => void): void;
/**
 * Retrieves the source content for a particular source, if its found. Returns null if not.
 */
export declare function sourceContentFor(map: TraceMap, source: string): string | null;
/**
 * Determines if the source is marked to ignore by the source map.
 */
export declare function isIgnored(map: TraceMap, source: string): boolean;
/**
 * A helper that skips sorting of the input map's mappings array, which can be expensive for larger
 * maps.
 */
export declare function presortedDecodedMap(map: DecodedSourceMap, mapUrl?: string): TraceMap;
/**
 * Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
 * a sourcemap, or to JSON.stringify.
 */
export declare function decodedMap(map: TraceMap): Omit<DecodedSourceMap, 'mappings'> & {
    mappings: readonly SourceMapSegment[][];
};
/**
 * Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
 * a sourcemap, or to JSON.stringify.
 */
export declare function encodedMap(map: TraceMap): EncodedSourceMap;
|
|
||||||
99
node_modules/@jridgewell/trace-mapping/dist/types/types.d.ts
generated
vendored
99
node_modules/@jridgewell/trace-mapping/dist/types/types.d.ts
generated
vendored
@ -1,99 +0,0 @@
|
|||||||
import type { SourceMapSegment } from './sourcemap-segment';
|
|
||||||
import type { GREATEST_LOWER_BOUND, LEAST_UPPER_BOUND, TraceMap } from './trace-mapping';
|
|
||||||
export interface SourceMapV3 {
|
|
||||||
file?: string | null;
|
|
||||||
names: string[];
|
|
||||||
sourceRoot?: string;
|
|
||||||
sources: (string | null)[];
|
|
||||||
sourcesContent?: (string | null)[];
|
|
||||||
version: 3;
|
|
||||||
ignoreList?: number[];
|
|
||||||
}
|
|
||||||
export interface EncodedSourceMap extends SourceMapV3 {
|
|
||||||
mappings: string;
|
|
||||||
}
|
|
||||||
export interface DecodedSourceMap extends SourceMapV3 {
|
|
||||||
mappings: SourceMapSegment[][];
|
|
||||||
}
|
|
||||||
export interface Section {
|
|
||||||
offset: {
|
|
||||||
line: number;
|
|
||||||
column: number;
|
|
||||||
};
|
|
||||||
map: EncodedSourceMap | DecodedSourceMap | SectionedSourceMap;
|
|
||||||
}
|
|
||||||
export interface SectionedSourceMap {
|
|
||||||
file?: string | null;
|
|
||||||
sections: Section[];
|
|
||||||
version: 3;
|
|
||||||
}
|
|
||||||
export type OriginalMapping = {
|
|
||||||
source: string | null;
|
|
||||||
line: number;
|
|
||||||
column: number;
|
|
||||||
name: string | null;
|
|
||||||
};
|
|
||||||
export type InvalidOriginalMapping = {
|
|
||||||
source: null;
|
|
||||||
line: null;
|
|
||||||
column: null;
|
|
||||||
name: null;
|
|
||||||
};
|
|
||||||
export type GeneratedMapping = {
|
|
||||||
line: number;
|
|
||||||
column: number;
|
|
||||||
};
|
|
||||||
export type InvalidGeneratedMapping = {
|
|
||||||
line: null;
|
|
||||||
column: null;
|
|
||||||
};
|
|
||||||
export type Bias = typeof GREATEST_LOWER_BOUND | typeof LEAST_UPPER_BOUND;
|
|
||||||
export type XInput = {
|
|
||||||
x_google_ignoreList?: SourceMapV3['ignoreList'];
|
|
||||||
};
|
|
||||||
export type EncodedSourceMapXInput = EncodedSourceMap & XInput;
|
|
||||||
export type DecodedSourceMapXInput = DecodedSourceMap & XInput;
|
|
||||||
export type SectionedSourceMapXInput = Omit<SectionedSourceMap, 'sections'> & {
|
|
||||||
sections: SectionXInput[];
|
|
||||||
};
|
|
||||||
export type SectionXInput = Omit<Section, 'map'> & {
|
|
||||||
map: SectionedSourceMapInput;
|
|
||||||
};
|
|
||||||
export type SourceMapInput = string | EncodedSourceMapXInput | DecodedSourceMapXInput | TraceMap;
|
|
||||||
export type SectionedSourceMapInput = SourceMapInput | SectionedSourceMapXInput;
|
|
||||||
export type Needle = {
|
|
||||||
line: number;
|
|
||||||
column: number;
|
|
||||||
bias?: Bias;
|
|
||||||
};
|
|
||||||
export type SourceNeedle = {
|
|
||||||
source: string;
|
|
||||||
line: number;
|
|
||||||
column: number;
|
|
||||||
bias?: Bias;
|
|
||||||
};
|
|
||||||
export type EachMapping = {
|
|
||||||
generatedLine: number;
|
|
||||||
generatedColumn: number;
|
|
||||||
source: null;
|
|
||||||
originalLine: null;
|
|
||||||
originalColumn: null;
|
|
||||||
name: null;
|
|
||||||
} | {
|
|
||||||
generatedLine: number;
|
|
||||||
generatedColumn: number;
|
|
||||||
source: string | null;
|
|
||||||
originalLine: number;
|
|
||||||
originalColumn: number;
|
|
||||||
name: string | null;
|
|
||||||
};
|
|
||||||
export declare abstract class SourceMap {
|
|
||||||
version: SourceMapV3['version'];
|
|
||||||
file: SourceMapV3['file'];
|
|
||||||
names: SourceMapV3['names'];
|
|
||||||
sourceRoot: SourceMapV3['sourceRoot'];
|
|
||||||
sources: SourceMapV3['sources'];
|
|
||||||
sourcesContent: SourceMapV3['sourcesContent'];
|
|
||||||
resolvedSources: SourceMapV3['sources'];
|
|
||||||
ignoreList: SourceMapV3['ignoreList'];
|
|
||||||
}
|
|
||||||
77
node_modules/@jridgewell/trace-mapping/package.json
generated
vendored
77
node_modules/@jridgewell/trace-mapping/package.json
generated
vendored
@ -1,77 +0,0 @@
|
|||||||
{
|
|
||||||
"name": "@jridgewell/trace-mapping",
|
|
||||||
"version": "0.3.25",
|
|
||||||
"description": "Trace the original position through a source map",
|
|
||||||
"keywords": [
|
|
||||||
"source",
|
|
||||||
"map"
|
|
||||||
],
|
|
||||||
"main": "dist/trace-mapping.umd.js",
|
|
||||||
"module": "dist/trace-mapping.mjs",
|
|
||||||
"types": "dist/types/trace-mapping.d.ts",
|
|
||||||
"files": [
|
|
||||||
"dist"
|
|
||||||
],
|
|
||||||
"exports": {
|
|
||||||
".": [
|
|
||||||
{
|
|
||||||
"types": "./dist/types/trace-mapping.d.ts",
|
|
||||||
"browser": "./dist/trace-mapping.umd.js",
|
|
||||||
"require": "./dist/trace-mapping.umd.js",
|
|
||||||
"import": "./dist/trace-mapping.mjs"
|
|
||||||
},
|
|
||||||
"./dist/trace-mapping.umd.js"
|
|
||||||
],
|
|
||||||
"./package.json": "./package.json"
|
|
||||||
},
|
|
||||||
"author": "Justin Ridgewell <justin@ridgewell.name>",
|
|
||||||
"repository": {
|
|
||||||
"type": "git",
|
|
||||||
"url": "git+https://github.com/jridgewell/trace-mapping.git"
|
|
||||||
},
|
|
||||||
"license": "MIT",
|
|
||||||
"scripts": {
|
|
||||||
"benchmark": "run-s build:rollup benchmark:*",
|
|
||||||
"benchmark:install": "cd benchmark && npm install",
|
|
||||||
"benchmark:only": "node --expose-gc benchmark/index.mjs",
|
|
||||||
"build": "run-s -n build:*",
|
|
||||||
"build:rollup": "rollup -c rollup.config.mjs",
|
|
||||||
"build:ts": "tsc --project tsconfig.build.json",
|
|
||||||
"lint": "run-s -n lint:*",
|
|
||||||
"lint:prettier": "npm run test:lint:prettier -- --write",
|
|
||||||
"lint:ts": "npm run test:lint:ts -- --fix",
|
|
||||||
"prebuild": "rm -rf dist",
|
|
||||||
"prepublishOnly": "npm run preversion",
|
|
||||||
"preversion": "run-s test build",
|
|
||||||
"test": "run-s -n test:lint test:only",
|
|
||||||
"test:debug": "mocha --inspect-brk",
|
|
||||||
"test:lint": "run-s -n test:lint:*",
|
|
||||||
"test:lint:prettier": "prettier --check '{src,test}/**/*.ts' '**/*.md'",
|
|
||||||
"test:lint:ts": "eslint '{src,test}/**/*.ts'",
|
|
||||||
"test:only": "c8 mocha",
|
|
||||||
"test:watch": "mocha --watch"
|
|
||||||
},
|
|
||||||
"devDependencies": {
|
|
||||||
"@rollup/plugin-typescript": "11.1.6",
|
|
||||||
"@types/mocha": "10.0.6",
|
|
||||||
"@types/node": "20.11.20",
|
|
||||||
"@typescript-eslint/eslint-plugin": "6.18.1",
|
|
||||||
"@typescript-eslint/parser": "6.18.1",
|
|
||||||
"benchmark": "2.1.4",
|
|
||||||
"c8": "9.0.0",
|
|
||||||
"esbuild": "0.19.11",
|
|
||||||
"eslint": "8.56.0",
|
|
||||||
"eslint-config-prettier": "9.1.0",
|
|
||||||
"eslint-plugin-no-only-tests": "3.1.0",
|
|
||||||
"mocha": "10.3.0",
|
|
||||||
"npm-run-all": "4.1.5",
|
|
||||||
"prettier": "3.1.1",
|
|
||||||
"rollup": "4.9.4",
|
|
||||||
"tsx": "4.7.0",
|
|
||||||
"typescript": "5.3.3"
|
|
||||||
},
|
|
||||||
"dependencies": {
|
|
||||||
"@jridgewell/resolve-uri": "^3.1.0",
|
|
||||||
"@jridgewell/sourcemap-codec": "^1.4.14"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
21
node_modules/@parcel/watcher-darwin-arm64/LICENSE
generated
vendored
21
node_modules/@parcel/watcher-darwin-arm64/LICENSE
generated
vendored
@ -1,21 +0,0 @@
|
|||||||
MIT License
|
|
||||||
|
|
||||||
Copyright (c) 2017-present Devon Govett
|
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
||||||
of this software and associated documentation files (the "Software"), to deal
|
|
||||||
in the Software without restriction, including without limitation the rights
|
|
||||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
||||||
copies of the Software, and to permit persons to whom the Software is
|
|
||||||
furnished to do so, subject to the following conditions:
|
|
||||||
|
|
||||||
The above copyright notice and this permission notice shall be included in all
|
|
||||||
copies or substantial portions of the Software.
|
|
||||||
|
|
||||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
||||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
||||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
||||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
||||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
||||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
||||||
SOFTWARE.
|
|
||||||
1
node_modules/@parcel/watcher-darwin-arm64/README.md
generated
vendored
1
node_modules/@parcel/watcher-darwin-arm64/README.md
generated
vendored
@ -1 +0,0 @@
|
|||||||
This is the darwin-arm64 build of @parcel/watcher. See https://github.com/parcel-bundler/watcher for details.
|
|
||||||
30
node_modules/@parcel/watcher-darwin-arm64/package.json
generated
vendored
30
node_modules/@parcel/watcher-darwin-arm64/package.json
generated
vendored
@ -1,30 +0,0 @@
|
|||||||
{
|
|
||||||
"name": "@parcel/watcher-darwin-arm64",
|
|
||||||
"version": "2.5.1",
|
|
||||||
"main": "watcher.node",
|
|
||||||
"repository": {
|
|
||||||
"type": "git",
|
|
||||||
"url": "https://github.com/parcel-bundler/watcher.git"
|
|
||||||
},
|
|
||||||
"description": "A native C++ Node module for querying and subscribing to filesystem events. Used by Parcel 2.",
|
|
||||||
"license": "MIT",
|
|
||||||
"publishConfig": {
|
|
||||||
"access": "public"
|
|
||||||
},
|
|
||||||
"funding": {
|
|
||||||
"type": "opencollective",
|
|
||||||
"url": "https://opencollective.com/parcel"
|
|
||||||
},
|
|
||||||
"files": [
|
|
||||||
"watcher.node"
|
|
||||||
],
|
|
||||||
"engines": {
|
|
||||||
"node": ">= 10.0.0"
|
|
||||||
},
|
|
||||||
"os": [
|
|
||||||
"darwin"
|
|
||||||
],
|
|
||||||
"cpu": [
|
|
||||||
"arm64"
|
|
||||||
]
|
|
||||||
}
|
|
||||||
BIN
node_modules/@parcel/watcher-darwin-arm64/watcher.node
generated
vendored
BIN
node_modules/@parcel/watcher-darwin-arm64/watcher.node
generated
vendored
Binary file not shown.
21
node_modules/@parcel/watcher/LICENSE
generated
vendored
21
node_modules/@parcel/watcher/LICENSE
generated
vendored
@ -1,21 +0,0 @@
|
|||||||
MIT License
|
|
||||||
|
|
||||||
Copyright (c) 2017-present Devon Govett
|
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
||||||
of this software and associated documentation files (the "Software"), to deal
|
|
||||||
in the Software without restriction, including without limitation the rights
|
|
||||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
||||||
copies of the Software, and to permit persons to whom the Software is
|
|
||||||
furnished to do so, subject to the following conditions:
|
|
||||||
|
|
||||||
The above copyright notice and this permission notice shall be included in all
|
|
||||||
copies or substantial portions of the Software.
|
|
||||||
|
|
||||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
||||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
||||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
||||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
||||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
||||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
||||||
SOFTWARE.
|
|
||||||
135
node_modules/@parcel/watcher/README.md
generated
vendored
135
node_modules/@parcel/watcher/README.md
generated
vendored
@ -1,135 +0,0 @@
|
|||||||
# @parcel/watcher
|
|
||||||
|
|
||||||
A native C++ Node module for querying and subscribing to filesystem events. Used by [Parcel 2](https://github.com/parcel-bundler/parcel).
|
|
||||||
|
|
||||||
## Features
|
|
||||||
|
|
||||||
- **Watch** - subscribe to realtime recursive directory change notifications when files or directories are created, updated, or deleted.
|
|
||||||
- **Query** - performantly query for historical change events in a directory, even when your program is not running.
|
|
||||||
- **Native** - implemented in C++ for performance and low-level integration with the operating system.
|
|
||||||
- **Cross platform** - includes backends for macOS, Linux, Windows, FreeBSD, and Watchman.
|
|
||||||
- **Performant** - events are throttled in C++ so the JavaScript thread is not overwhelmed during large filesystem changes (e.g. `git checkout` or `npm install`).
|
|
||||||
- **Scalable** - tens of thousands of files can be watched or queried at once with good performance.
|
|
||||||
|
|
||||||
## Example
|
|
||||||
|
|
||||||
```javascript
|
|
||||||
const watcher = require('@parcel/watcher');
|
|
||||||
const path = require('path');
|
|
||||||
|
|
||||||
// Subscribe to events
|
|
||||||
let subscription = await watcher.subscribe(process.cwd(), (err, events) => {
|
|
||||||
console.log(events);
|
|
||||||
});
|
|
||||||
|
|
||||||
// later on...
|
|
||||||
await subscription.unsubscribe();
|
|
||||||
|
|
||||||
// Get events since some saved snapshot in the past
|
|
||||||
let snapshotPath = path.join(process.cwd(), 'snapshot.txt');
|
|
||||||
let events = await watcher.getEventsSince(process.cwd(), snapshotPath);
|
|
||||||
|
|
||||||
// Save a snapshot for later
|
|
||||||
await watcher.writeSnapshot(process.cwd(), snapshotPath);
|
|
||||||
```
|
|
||||||
|
|
||||||
## Watching
|
|
||||||
|
|
||||||
`@parcel/watcher` supports subscribing to realtime notifications of changes in a directory. It works recursively, so changes in sub-directories will also be emitted.
|
|
||||||
|
|
||||||
Events are throttled and coalesced for performance during large changes like `git checkout` or `npm install`, and a single notification will be emitted with all of the events at the end.
|
|
||||||
|
|
||||||
Only one notification will be emitted per file. For example, if a file was both created and updated since the last event, you'll get only a `create` event. If a file is both created and deleted, you will not be notifed of that file. Renames cause two events: a `delete` for the old name, and a `create` for the new name.
|
|
||||||
|
|
||||||
```javascript
|
|
||||||
let subscription = await watcher.subscribe(process.cwd(), (err, events) => {
|
|
||||||
console.log(events);
|
|
||||||
});
|
|
||||||
```
|
|
||||||
|
|
||||||
Events have two properties:
|
|
||||||
|
|
||||||
- `type` - the event type: `create`, `update`, or `delete`.
|
|
||||||
- `path` - the absolute path to the file or directory.
|
|
||||||
|
|
||||||
To unsubscribe from change notifications, call the `unsubscribe` method on the returned subscription object.
|
|
||||||
|
|
||||||
```javascript
|
|
||||||
await subscription.unsubscribe();
|
|
||||||
```
|
|
||||||
|
|
||||||
`@parcel/watcher` has the following watcher backends, listed in priority order:
|
|
||||||
|
|
||||||
- [FSEvents](https://developer.apple.com/documentation/coreservices/file_system_events) on macOS
|
|
||||||
- [Watchman](https://facebook.github.io/watchman/) if installed
|
|
||||||
- [inotify](http://man7.org/linux/man-pages/man7/inotify.7.html) on Linux
|
|
||||||
- [ReadDirectoryChangesW](https://msdn.microsoft.com/en-us/library/windows/desktop/aa365465%28v%3Dvs.85%29.aspx) on Windows
|
|
||||||
- [kqueue](https://man.freebsd.org/cgi/man.cgi?kqueue) on FreeBSD, or as an alternative to FSEvents on macOS
|
|
||||||
|
|
||||||
You can specify the exact backend you wish to use by passing the `backend` option. If that backend is not available on the current platform, the default backend will be used instead. See below for the list of backend names that can be passed to the options.
|
|
||||||
|
|
||||||
## Querying
|
|
||||||
|
|
||||||
`@parcel/watcher` also supports querying for historical changes made in a directory, even when your program is not running. This makes it easy to invalidate a cache and re-build only the files that have changed, for example. It can be **significantly** faster than traversing the entire filesystem to determine what files changed, depending on the platform.
|
|
||||||
|
|
||||||
In order to query for historical changes, you first need a previous snapshot to compare to. This can be saved to a file with the `writeSnapshot` function, e.g. just before your program exits.
|
|
||||||
|
|
||||||
```javascript
|
|
||||||
await watcher.writeSnapshot(dirPath, snapshotPath);
|
|
||||||
```
|
|
||||||
|
|
||||||
When your program starts up, you can query for changes that have occurred since that snapshot using the `getEventsSince` function.
|
|
||||||
|
|
||||||
```javascript
|
|
||||||
let events = await watcher.getEventsSince(dirPath, snapshotPath);
|
|
||||||
```
|
|
||||||
|
|
||||||
The events returned are exactly the same as the events that would be passed to the `subscribe` callback (see above).
|
|
||||||
|
|
||||||
`@parcel/watcher` has the following watcher backends, listed in priority order:
|
|
||||||
|
|
||||||
- [FSEvents](https://developer.apple.com/documentation/coreservices/file_system_events) on macOS
|
|
||||||
- [Watchman](https://facebook.github.io/watchman/) if installed
|
|
||||||
- [fts](http://man7.org/linux/man-pages/man3/fts.3.html) (brute force) on Linux and FreeBSD
|
|
||||||
- [FindFirstFile](https://docs.microsoft.com/en-us/windows/desktop/api/fileapi/nf-fileapi-findfirstfilea) (brute force) on Windows
|
|
||||||
|
|
||||||
The FSEvents (macOS) and Watchman backends are significantly more performant than the brute force backends used by default on Linux and Windows, for example returning results in miliseconds instead of seconds for large directory trees. This is because a background daemon monitoring filesystem changes on those platforms allows us to query cached data rather than traversing the filesystem manually (brute force).
|
|
||||||
|
|
||||||
macOS has good performance with FSEvents by default. For the best performance on other platforms, install [Watchman](https://facebook.github.io/watchman/) and it will be used by `@parcel/watcher` automatically.
|
|
||||||
|
|
||||||
You can specify the exact backend you wish to use by passing the `backend` option. If that backend is not available on the current platform, the default backend will be used instead. See below for the list of backend names that can be passed to the options.
|
|
||||||
|
|
||||||
## Options
|
|
||||||
|
|
||||||
All of the APIs in `@parcel/watcher` support the following options, which are passed as an object as the last function argument.
|
|
||||||
|
|
||||||
- `ignore` - an array of paths or glob patterns to ignore. uses [`is-glob`](https://github.com/micromatch/is-glob) to distinguish paths from globs. glob patterns are parsed with [`micromatch`](https://github.com/micromatch/micromatch) (see [features](https://github.com/micromatch/micromatch#matching-features)).
|
|
||||||
- paths can be relative or absolute and can either be files or directories. No events will be emitted about these files or directories or their children.
|
|
||||||
- glob patterns match on relative paths from the root that is watched. No events will be emitted for matching paths.
|
|
||||||
- `backend` - the name of an explicitly chosen backend to use. Allowed options are `"fs-events"`, `"watchman"`, `"inotify"`, `"kqueue"`, `"windows"`, or `"brute-force"` (only for querying). If the specified backend is not available on the current platform, the default backend will be used instead.
|
|
||||||
|
|
||||||
## WASM
|
|
||||||
|
|
||||||
The `@parcel/watcher-wasm` package can be used in place of `@parcel/watcher` on unsupported platforms. It relies on the Node `fs` module, so in non-Node environments such as browsers, an `fs` polyfill will be needed.
|
|
||||||
|
|
||||||
**Note**: the WASM implementation is significantly less efficient than the native implementations because it must crawl the file system to watch each directory individually. Use the native `@parcel/watcher` package wherever possible.
|
|
||||||
|
|
||||||
```js
|
|
||||||
import {subscribe} from '@parcel/watcher-wasm';
|
|
||||||
|
|
||||||
// Use the module as documented above.
|
|
||||||
subscribe(/* ... */);
|
|
||||||
```
|
|
||||||
|
|
||||||
## Who is using this?
|
|
||||||
|
|
||||||
- [Parcel 2](https://parceljs.org/)
|
|
||||||
- [VSCode](https://code.visualstudio.com/updates/v1_62#_file-watching-changes)
|
|
||||||
- [Tailwind CSS Intellisense](https://github.com/tailwindlabs/tailwindcss-intellisense)
|
|
||||||
- [Gatsby Cloud](https://twitter.com/chatsidhartha/status/1435647412828196867)
|
|
||||||
- [Nx](https://nx.dev)
|
|
||||||
- [Nuxt](https://nuxt.com)
|
|
||||||
|
|
||||||
## License
|
|
||||||
|
|
||||||
MIT
|
|
||||||
93
node_modules/@parcel/watcher/binding.gyp
generated
vendored
93
node_modules/@parcel/watcher/binding.gyp
generated
vendored
@ -1,93 +0,0 @@
|
|||||||
{
|
|
||||||
"targets": [
|
|
||||||
{
|
|
||||||
"target_name": "watcher",
|
|
||||||
"defines": [ "NAPI_DISABLE_CPP_EXCEPTIONS" ],
|
|
||||||
"sources": [ "src/binding.cc", "src/Watcher.cc", "src/Backend.cc", "src/DirTree.cc", "src/Glob.cc", "src/Debounce.cc" ],
|
|
||||||
"include_dirs" : ["<!(node -p \"require('node-addon-api').include_dir\")"],
|
|
||||||
'cflags!': [ '-fno-exceptions', '-std=c++17' ],
|
|
||||||
'cflags_cc!': [ '-fno-exceptions', '-std=c++17' ],
|
|
||||||
"conditions": [
|
|
||||||
['OS=="mac"', {
|
|
||||||
"sources": [
|
|
||||||
"src/watchman/BSER.cc",
|
|
||||||
"src/watchman/WatchmanBackend.cc",
|
|
||||||
"src/shared/BruteForceBackend.cc",
|
|
||||||
"src/unix/fts.cc",
|
|
||||||
"src/macos/FSEventsBackend.cc",
|
|
||||||
"src/kqueue/KqueueBackend.cc"
|
|
||||||
],
|
|
||||||
"link_settings": {
|
|
||||||
"libraries": ["CoreServices.framework"]
|
|
||||||
},
|
|
||||||
"defines": [
|
|
||||||
"WATCHMAN",
|
|
||||||
"BRUTE_FORCE",
|
|
||||||
"FS_EVENTS",
|
|
||||||
"KQUEUE"
|
|
||||||
],
|
|
||||||
"xcode_settings": {
|
|
||||||
"GCC_ENABLE_CPP_EXCEPTIONS": "YES"
|
|
||||||
}
|
|
||||||
}],
|
|
||||||
['OS=="mac" and target_arch=="arm64"', {
|
|
||||||
"xcode_settings": {
|
|
||||||
"ARCHS": ["arm64"]
|
|
||||||
}
|
|
||||||
}],
|
|
||||||
['OS=="linux" or OS=="android"', {
|
|
||||||
"sources": [
|
|
||||||
"src/watchman/BSER.cc",
|
|
||||||
"src/watchman/WatchmanBackend.cc",
|
|
||||||
"src/shared/BruteForceBackend.cc",
|
|
||||||
"src/linux/InotifyBackend.cc",
|
|
||||||
"src/unix/legacy.cc"
|
|
||||||
],
|
|
||||||
"defines": [
|
|
||||||
"WATCHMAN",
|
|
||||||
"INOTIFY",
|
|
||||||
"BRUTE_FORCE"
|
|
||||||
]
|
|
||||||
}],
|
|
||||||
['OS=="win"', {
|
|
||||||
"sources": [
|
|
||||||
"src/watchman/BSER.cc",
|
|
||||||
"src/watchman/WatchmanBackend.cc",
|
|
||||||
"src/shared/BruteForceBackend.cc",
|
|
||||||
"src/windows/WindowsBackend.cc",
|
|
||||||
"src/windows/win_utils.cc"
|
|
||||||
],
|
|
||||||
"defines": [
|
|
||||||
"WATCHMAN",
|
|
||||||
"WINDOWS",
|
|
||||||
"BRUTE_FORCE"
|
|
||||||
],
|
|
||||||
"msvs_settings": {
|
|
||||||
"VCCLCompilerTool": {
|
|
||||||
"ExceptionHandling": 1, # /EHsc
|
|
||||||
"AdditionalOptions": ['-std:c++17']
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}],
|
|
||||||
['OS=="freebsd"', {
|
|
||||||
"sources": [
|
|
||||||
"src/watchman/BSER.cc",
|
|
||||||
"src/watchman/WatchmanBackend.cc",
|
|
||||||
"src/shared/BruteForceBackend.cc",
|
|
||||||
"src/unix/fts.cc",
|
|
||||||
"src/kqueue/KqueueBackend.cc"
|
|
||||||
],
|
|
||||||
"defines": [
|
|
||||||
"WATCHMAN",
|
|
||||||
"BRUTE_FORCE",
|
|
||||||
"KQUEUE"
|
|
||||||
]
|
|
||||||
}]
|
|
||||||
]
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"variables": {
|
|
||||||
"openssl_fips": "",
|
|
||||||
"node_use_dtrace": "false"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
49
node_modules/@parcel/watcher/index.d.ts
generated
vendored
49
node_modules/@parcel/watcher/index.d.ts
generated
vendored
@ -1,49 +0,0 @@
|
|||||||
declare type FilePath = string;
|
|
||||||
declare type GlobPattern = string;
|
|
||||||
|
|
||||||
declare namespace ParcelWatcher {
|
|
||||||
export type BackendType =
|
|
||||||
| 'fs-events'
|
|
||||||
| 'watchman'
|
|
||||||
| 'inotify'
|
|
||||||
| 'windows'
|
|
||||||
| 'brute-force';
|
|
||||||
export type EventType = 'create' | 'update' | 'delete';
|
|
||||||
export interface Options {
|
|
||||||
ignore?: (FilePath|GlobPattern)[];
|
|
||||||
backend?: BackendType;
|
|
||||||
}
|
|
||||||
export type SubscribeCallback = (
|
|
||||||
err: Error | null,
|
|
||||||
events: Event[]
|
|
||||||
) => unknown;
|
|
||||||
export interface AsyncSubscription {
|
|
||||||
unsubscribe(): Promise<void>;
|
|
||||||
}
|
|
||||||
export interface Event {
|
|
||||||
path: FilePath;
|
|
||||||
type: EventType;
|
|
||||||
}
|
|
||||||
export function getEventsSince(
|
|
||||||
dir: FilePath,
|
|
||||||
snapshot: FilePath,
|
|
||||||
opts?: Options
|
|
||||||
): Promise<Event[]>;
|
|
||||||
export function subscribe(
|
|
||||||
dir: FilePath,
|
|
||||||
fn: SubscribeCallback,
|
|
||||||
opts?: Options
|
|
||||||
): Promise<AsyncSubscription>;
|
|
||||||
export function unsubscribe(
|
|
||||||
dir: FilePath,
|
|
||||||
fn: SubscribeCallback,
|
|
||||||
opts?: Options
|
|
||||||
): Promise<void>;
|
|
||||||
export function writeSnapshot(
|
|
||||||
dir: FilePath,
|
|
||||||
snapshot: FilePath,
|
|
||||||
opts?: Options
|
|
||||||
): Promise<FilePath>;
|
|
||||||
}
|
|
||||||
|
|
||||||
export = ParcelWatcher;
|
|
||||||
41
node_modules/@parcel/watcher/index.js
generated
vendored
41
node_modules/@parcel/watcher/index.js
generated
vendored
@ -1,41 +0,0 @@
|
|||||||
const {createWrapper} = require('./wrapper');
|
|
||||||
|
|
||||||
let name = `@parcel/watcher-${process.platform}-${process.arch}`;
|
|
||||||
if (process.platform === 'linux') {
|
|
||||||
const { MUSL, family } = require('detect-libc');
|
|
||||||
if (family === MUSL) {
|
|
||||||
name += '-musl';
|
|
||||||
} else {
|
|
||||||
name += '-glibc';
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
let binding;
|
|
||||||
try {
|
|
||||||
binding = require(name);
|
|
||||||
} catch (err) {
|
|
||||||
handleError(err);
|
|
||||||
try {
|
|
||||||
binding = require('./build/Release/watcher.node');
|
|
||||||
} catch (err) {
|
|
||||||
handleError(err);
|
|
||||||
try {
|
|
||||||
binding = require('./build/Debug/watcher.node');
|
|
||||||
} catch (err) {
|
|
||||||
handleError(err);
|
|
||||||
throw new Error(`No prebuild or local build of @parcel/watcher found. Tried ${name}. Please ensure it is installed (don't use --no-optional when installing with npm). Otherwise it is possible we don't support your platform yet. If this is the case, please report an issue to https://github.com/parcel-bundler/watcher.`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function handleError(err) {
|
|
||||||
if (err?.code !== 'MODULE_NOT_FOUND') {
|
|
||||||
throw err;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const wrapper = createWrapper(binding);
|
|
||||||
exports.writeSnapshot = wrapper.writeSnapshot;
|
|
||||||
exports.getEventsSince = wrapper.getEventsSince;
|
|
||||||
exports.subscribe = wrapper.subscribe;
|
|
||||||
exports.unsubscribe = wrapper.unsubscribe;
|
|
||||||
48
node_modules/@parcel/watcher/index.js.flow
generated
vendored
48
node_modules/@parcel/watcher/index.js.flow
generated
vendored
@ -1,48 +0,0 @@
|
|||||||
// @flow
|
|
||||||
declare type FilePath = string;
|
|
||||||
declare type GlobPattern = string;
|
|
||||||
|
|
||||||
export type BackendType =
|
|
||||||
| 'fs-events'
|
|
||||||
| 'watchman'
|
|
||||||
| 'inotify'
|
|
||||||
| 'windows'
|
|
||||||
| 'brute-force';
|
|
||||||
export type EventType = 'create' | 'update' | 'delete';
|
|
||||||
export interface Options {
|
|
||||||
ignore?: Array<FilePath | GlobPattern>,
|
|
||||||
backend?: BackendType
|
|
||||||
}
|
|
||||||
export type SubscribeCallback = (
|
|
||||||
err: ?Error,
|
|
||||||
events: Array<Event>
|
|
||||||
) => mixed;
|
|
||||||
export interface AsyncSubscription {
|
|
||||||
unsubscribe(): Promise<void>
|
|
||||||
}
|
|
||||||
export interface Event {
|
|
||||||
path: FilePath,
|
|
||||||
type: EventType
|
|
||||||
}
|
|
||||||
declare module.exports: {
|
|
||||||
getEventsSince(
|
|
||||||
dir: FilePath,
|
|
||||||
snapshot: FilePath,
|
|
||||||
opts?: Options
|
|
||||||
): Promise<Array<Event>>,
|
|
||||||
subscribe(
|
|
||||||
dir: FilePath,
|
|
||||||
fn: SubscribeCallback,
|
|
||||||
opts?: Options
|
|
||||||
): Promise<AsyncSubscription>,
|
|
||||||
unsubscribe(
|
|
||||||
dir: FilePath,
|
|
||||||
fn: SubscribeCallback,
|
|
||||||
opts?: Options
|
|
||||||
): Promise<void>,
|
|
||||||
writeSnapshot(
|
|
||||||
dir: FilePath,
|
|
||||||
snapshot: FilePath,
|
|
||||||
opts?: Options
|
|
||||||
): Promise<FilePath>
|
|
||||||
}
|
|
||||||
88
node_modules/@parcel/watcher/package.json
generated
vendored
88
node_modules/@parcel/watcher/package.json
generated
vendored
@ -1,88 +0,0 @@
|
|||||||
{
|
|
||||||
"name": "@parcel/watcher",
|
|
||||||
"version": "2.5.1",
|
|
||||||
"main": "index.js",
|
|
||||||
"types": "index.d.ts",
|
|
||||||
"repository": {
|
|
||||||
"type": "git",
|
|
||||||
"url": "https://github.com/parcel-bundler/watcher.git"
|
|
||||||
},
|
|
||||||
"description": "A native C++ Node module for querying and subscribing to filesystem events. Used by Parcel 2.",
|
|
||||||
"license": "MIT",
|
|
||||||
"publishConfig": {
|
|
||||||
"access": "public"
|
|
||||||
},
|
|
||||||
"funding": {
|
|
||||||
"type": "opencollective",
|
|
||||||
"url": "https://opencollective.com/parcel"
|
|
||||||
},
|
|
||||||
"files": [
|
|
||||||
"index.js",
|
|
||||||
"index.js.flow",
|
|
||||||
"index.d.ts",
|
|
||||||
"wrapper.js",
|
|
||||||
"package.json",
|
|
||||||
"README.md",
|
|
||||||
"LICENSE",
|
|
||||||
"src",
|
|
||||||
"scripts/build-from-source.js",
|
|
||||||
"binding.gyp"
|
|
||||||
],
|
|
||||||
"scripts": {
|
|
||||||
"prebuild": "prebuildify --napi --strip --tag-libc",
|
|
||||||
"format": "prettier --write \"./**/*.{js,json,md}\"",
|
|
||||||
"build": "node-gyp rebuild",
|
|
||||||
"install": "node scripts/build-from-source.js",
|
|
||||||
"test": "mocha"
|
|
||||||
},
|
|
||||||
"engines": {
|
|
||||||
"node": ">= 10.0.0"
|
|
||||||
},
|
|
||||||
"husky": {
|
|
||||||
"hooks": {
|
|
||||||
"pre-commit": "lint-staged"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"lint-staged": {
|
|
||||||
"*.{js,json,md}": [
|
|
||||||
"prettier --write",
|
|
||||||
"git add"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"dependencies": {
|
|
||||||
"detect-libc": "^1.0.3",
|
|
||||||
"is-glob": "^4.0.3",
|
|
||||||
"micromatch": "^4.0.5",
|
|
||||||
"node-addon-api": "^7.0.0"
|
|
||||||
},
|
|
||||||
"devDependencies": {
|
|
||||||
"esbuild": "^0.19.8",
|
|
||||||
"fs-extra": "^10.0.0",
|
|
||||||
"husky": "^7.0.2",
|
|
||||||
"lint-staged": "^11.1.2",
|
|
||||||
"mocha": "^9.1.1",
|
|
||||||
"napi-wasm": "^1.1.0",
|
|
||||||
"prebuildify": "^6.0.1",
|
|
||||||
"prettier": "^2.3.2"
|
|
||||||
},
|
|
||||||
"binary": {
|
|
||||||
"napi_versions": [
|
|
||||||
3
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"optionalDependencies": {
|
|
||||||
"@parcel/watcher-darwin-x64": "2.5.1",
|
|
||||||
"@parcel/watcher-darwin-arm64": "2.5.1",
|
|
||||||
"@parcel/watcher-win32-x64": "2.5.1",
|
|
||||||
"@parcel/watcher-win32-arm64": "2.5.1",
|
|
||||||
"@parcel/watcher-win32-ia32": "2.5.1",
|
|
||||||
"@parcel/watcher-linux-x64-glibc": "2.5.1",
|
|
||||||
"@parcel/watcher-linux-x64-musl": "2.5.1",
|
|
||||||
"@parcel/watcher-linux-arm64-glibc": "2.5.1",
|
|
||||||
"@parcel/watcher-linux-arm64-musl": "2.5.1",
|
|
||||||
"@parcel/watcher-linux-arm-glibc": "2.5.1",
|
|
||||||
"@parcel/watcher-linux-arm-musl": "2.5.1",
|
|
||||||
"@parcel/watcher-android-arm64": "2.5.1",
|
|
||||||
"@parcel/watcher-freebsd-x64": "2.5.1"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
13
node_modules/@parcel/watcher/scripts/build-from-source.js
generated
vendored
13
node_modules/@parcel/watcher/scripts/build-from-source.js
generated
vendored
@ -1,13 +0,0 @@
|
|||||||
#!/usr/bin/env node
|
|
||||||
|
|
||||||
const {spawn} = require('child_process');
|
|
||||||
|
|
||||||
if (process.env.npm_config_build_from_source === 'true') {
|
|
||||||
build();
|
|
||||||
}
|
|
||||||
|
|
||||||
function build() {
|
|
||||||
spawn('node-gyp', ['rebuild'], { stdio: 'inherit', shell: true }).on('exit', function (code) {
|
|
||||||
process.exit(code);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
182
node_modules/@parcel/watcher/src/Backend.cc
generated
vendored
182
node_modules/@parcel/watcher/src/Backend.cc
generated
vendored
@ -1,182 +0,0 @@
|
|||||||
#ifdef FS_EVENTS
|
|
||||||
#include "macos/FSEventsBackend.hh"
|
|
||||||
#endif
|
|
||||||
#ifdef WATCHMAN
|
|
||||||
#include "watchman/WatchmanBackend.hh"
|
|
||||||
#endif
|
|
||||||
#ifdef WINDOWS
|
|
||||||
#include "windows/WindowsBackend.hh"
|
|
||||||
#endif
|
|
||||||
#ifdef INOTIFY
|
|
||||||
#include "linux/InotifyBackend.hh"
|
|
||||||
#endif
|
|
||||||
#ifdef KQUEUE
|
|
||||||
#include "kqueue/KqueueBackend.hh"
|
|
||||||
#endif
|
|
||||||
#ifdef __wasm32__
|
|
||||||
#include "wasm/WasmBackend.hh"
|
|
||||||
#endif
|
|
||||||
#include "shared/BruteForceBackend.hh"
|
|
||||||
|
|
||||||
#include "Backend.hh"
|
|
||||||
#include <unordered_map>
|
|
||||||
|
|
||||||
static std::unordered_map<std::string, std::shared_ptr<Backend>> sharedBackends;
|
|
||||||
|
|
||||||
std::shared_ptr<Backend> getBackend(std::string backend) {
|
|
||||||
// Use FSEvents on macOS by default.
|
|
||||||
// Use watchman by default if available on other platforms.
|
|
||||||
// Fall back to brute force.
|
|
||||||
#ifdef FS_EVENTS
|
|
||||||
if (backend == "fs-events" || backend == "default") {
|
|
||||||
return std::make_shared<FSEventsBackend>();
|
|
||||||
}
|
|
||||||
#endif
|
|
||||||
#ifdef WATCHMAN
|
|
||||||
if ((backend == "watchman" || backend == "default") && WatchmanBackend::checkAvailable()) {
|
|
||||||
return std::make_shared<WatchmanBackend>();
|
|
||||||
}
|
|
||||||
#endif
|
|
||||||
#ifdef WINDOWS
|
|
||||||
if (backend == "windows" || backend == "default") {
|
|
||||||
return std::make_shared<WindowsBackend>();
|
|
||||||
}
|
|
||||||
#endif
|
|
||||||
#ifdef INOTIFY
|
|
||||||
if (backend == "inotify" || backend == "default") {
|
|
||||||
return std::make_shared<InotifyBackend>();
|
|
||||||
}
|
|
||||||
#endif
|
|
||||||
#ifdef KQUEUE
|
|
||||||
if (backend == "kqueue" || backend == "default") {
|
|
||||||
return std::make_shared<KqueueBackend>();
|
|
||||||
}
|
|
||||||
#endif
|
|
||||||
#ifdef __wasm32__
|
|
||||||
if (backend == "wasm" || backend == "default") {
|
|
||||||
return std::make_shared<WasmBackend>();
|
|
||||||
}
|
|
||||||
#endif
|
|
||||||
if (backend == "brute-force" || backend == "default") {
|
|
||||||
return std::make_shared<BruteForceBackend>();
|
|
||||||
}
|
|
||||||
|
|
||||||
return nullptr;
|
|
||||||
}
|
|
||||||
|
|
||||||
std::shared_ptr<Backend> Backend::getShared(std::string backend) {
|
|
||||||
auto found = sharedBackends.find(backend);
|
|
||||||
if (found != sharedBackends.end()) {
|
|
||||||
return found->second;
|
|
||||||
}
|
|
||||||
|
|
||||||
auto result = getBackend(backend);
|
|
||||||
if (!result) {
|
|
||||||
return getShared("default");
|
|
||||||
}
|
|
||||||
|
|
||||||
result->run();
|
|
||||||
sharedBackends.emplace(backend, result);
|
|
||||||
return result;
|
|
||||||
}
|
|
||||||
|
|
||||||
void removeShared(Backend *backend) {
|
|
||||||
for (auto it = sharedBackends.begin(); it != sharedBackends.end(); it++) {
|
|
||||||
if (it->second.get() == backend) {
|
|
||||||
sharedBackends.erase(it);
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Free up memory.
|
|
||||||
if (sharedBackends.size() == 0) {
|
|
||||||
sharedBackends.rehash(0);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
void Backend::run() {
|
|
||||||
#ifndef __wasm32__
|
|
||||||
mThread = std::thread([this] () {
|
|
||||||
try {
|
|
||||||
start();
|
|
||||||
} catch (std::exception &err) {
|
|
||||||
handleError(err);
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
if (mThread.joinable()) {
|
|
||||||
mStartedSignal.wait();
|
|
||||||
}
|
|
||||||
#else
|
|
||||||
try {
|
|
||||||
start();
|
|
||||||
} catch (std::exception &err) {
|
|
||||||
handleError(err);
|
|
||||||
}
|
|
||||||
#endif
|
|
||||||
}
|
|
||||||
|
|
||||||
void Backend::notifyStarted() {
|
|
||||||
mStartedSignal.notify();
|
|
||||||
}
|
|
||||||
|
|
||||||
void Backend::start() {
|
|
||||||
notifyStarted();
|
|
||||||
}
|
|
||||||
|
|
||||||
Backend::~Backend() {
|
|
||||||
#ifndef __wasm32__
|
|
||||||
// Wait for thread to stop
|
|
||||||
if (mThread.joinable()) {
|
|
||||||
// If the backend is being destroyed from the thread itself, detach, otherwise join.
|
|
||||||
if (mThread.get_id() == std::this_thread::get_id()) {
|
|
||||||
mThread.detach();
|
|
||||||
} else {
|
|
||||||
mThread.join();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
#endif
|
|
||||||
}
|
|
||||||
|
|
||||||
void Backend::watch(WatcherRef watcher) {
|
|
||||||
std::unique_lock<std::mutex> lock(mMutex);
|
|
||||||
auto res = mSubscriptions.find(watcher);
|
|
||||||
if (res == mSubscriptions.end()) {
|
|
||||||
try {
|
|
||||||
this->subscribe(watcher);
|
|
||||||
mSubscriptions.insert(watcher);
|
|
||||||
} catch (std::exception &err) {
|
|
||||||
unref();
|
|
||||||
throw;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
void Backend::unwatch(WatcherRef watcher) {
|
|
||||||
std::unique_lock<std::mutex> lock(mMutex);
|
|
||||||
size_t deleted = mSubscriptions.erase(watcher);
|
|
||||||
if (deleted > 0) {
|
|
||||||
this->unsubscribe(watcher);
|
|
||||||
unref();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
void Backend::unref() {
|
|
||||||
if (mSubscriptions.size() == 0) {
|
|
||||||
removeShared(this);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
void Backend::handleWatcherError(WatcherError &err) {
|
|
||||||
unwatch(err.mWatcher);
|
|
||||||
err.mWatcher->notifyError(err);
|
|
||||||
}
|
|
||||||
|
|
||||||
void Backend::handleError(std::exception &err) {
|
|
||||||
std::unique_lock<std::mutex> lock(mMutex);
|
|
||||||
for (auto it = mSubscriptions.begin(); it != mSubscriptions.end(); it++) {
|
|
||||||
(*it)->notifyError(err);
|
|
||||||
}
|
|
||||||
|
|
||||||
removeShared(this);
|
|
||||||
}
|
|
||||||
37
node_modules/@parcel/watcher/src/Backend.hh
generated
vendored
37
node_modules/@parcel/watcher/src/Backend.hh
generated
vendored
@ -1,37 +0,0 @@
|
|||||||
#ifndef BACKEND_H
|
|
||||||
#define BACKEND_H
|
|
||||||
|
|
||||||
#include "Event.hh"
|
|
||||||
#include "Watcher.hh"
|
|
||||||
#include "Signal.hh"
|
|
||||||
#include <thread>
|
|
||||||
|
|
||||||
class Backend {
|
|
||||||
public:
|
|
||||||
virtual ~Backend();
|
|
||||||
void run();
|
|
||||||
void notifyStarted();
|
|
||||||
|
|
||||||
virtual void start();
|
|
||||||
virtual void writeSnapshot(WatcherRef watcher, std::string *snapshotPath) = 0;
|
|
||||||
virtual void getEventsSince(WatcherRef watcher, std::string *snapshotPath) = 0;
|
|
||||||
virtual void subscribe(WatcherRef watcher) = 0;
|
|
||||||
virtual void unsubscribe(WatcherRef watcher) = 0;
|
|
||||||
|
|
||||||
static std::shared_ptr<Backend> getShared(std::string backend);
|
|
||||||
|
|
||||||
void watch(WatcherRef watcher);
|
|
||||||
void unwatch(WatcherRef watcher);
|
|
||||||
void unref();
|
|
||||||
void handleWatcherError(WatcherError &err);
|
|
||||||
|
|
||||||
std::mutex mMutex;
|
|
||||||
std::thread mThread;
|
|
||||||
private:
|
|
||||||
std::unordered_set<WatcherRef> mSubscriptions;
|
|
||||||
Signal mStartedSignal;
|
|
||||||
|
|
||||||
void handleError(std::exception &err);
|
|
||||||
};
|
|
||||||
|
|
||||||
#endif
|
|
||||||
113
node_modules/@parcel/watcher/src/Debounce.cc
generated
vendored
113
node_modules/@parcel/watcher/src/Debounce.cc
generated
vendored
@ -1,113 +0,0 @@
|
|||||||
#include "Debounce.hh"
|
|
||||||
|
|
||||||
#ifdef __wasm32__
|
|
||||||
extern "C" void on_timeout(void *ctx) {
|
|
||||||
Debounce *debounce = (Debounce *)ctx;
|
|
||||||
debounce->notify();
|
|
||||||
}
|
|
||||||
#endif
|
|
||||||
|
|
||||||
std::shared_ptr<Debounce> Debounce::getShared() {
|
|
||||||
static std::weak_ptr<Debounce> sharedInstance;
|
|
||||||
std::shared_ptr<Debounce> shared = sharedInstance.lock();
|
|
||||||
if (!shared) {
|
|
||||||
shared = std::make_shared<Debounce>();
|
|
||||||
sharedInstance = shared;
|
|
||||||
}
|
|
||||||
|
|
||||||
return shared;
|
|
||||||
}
|
|
||||||
|
|
||||||
Debounce::Debounce() {
|
|
||||||
mRunning = true;
|
|
||||||
#ifndef __wasm32__
|
|
||||||
mThread = std::thread([this] () {
|
|
||||||
loop();
|
|
||||||
});
|
|
||||||
#endif
|
|
||||||
}
|
|
||||||
|
|
||||||
Debounce::~Debounce() {
|
|
||||||
mRunning = false;
|
|
||||||
#ifndef __wasm32__
|
|
||||||
mWaitSignal.notify();
|
|
||||||
mThread.join();
|
|
||||||
#endif
|
|
||||||
}
|
|
||||||
|
|
||||||
void Debounce::add(void *key, std::function<void()> cb) {
|
|
||||||
std::unique_lock<std::mutex> lock(mMutex);
|
|
||||||
mCallbacks.emplace(key, cb);
|
|
||||||
}
|
|
||||||
|
|
||||||
void Debounce::remove(void *key) {
|
|
||||||
std::unique_lock<std::mutex> lock(mMutex);
|
|
||||||
mCallbacks.erase(key);
|
|
||||||
}
|
|
||||||
|
|
||||||
void Debounce::trigger() {
|
|
||||||
std::unique_lock<std::mutex> lock(mMutex);
|
|
||||||
#ifdef __wasm32__
|
|
||||||
notifyIfReady();
|
|
||||||
#else
|
|
||||||
mWaitSignal.notify();
|
|
||||||
#endif
|
|
||||||
}
|
|
||||||
|
|
||||||
#ifndef __wasm32__
|
|
||||||
void Debounce::loop() {
|
|
||||||
while (mRunning) {
|
|
||||||
mWaitSignal.wait();
|
|
||||||
if (!mRunning) {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
|
|
||||||
notifyIfReady();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
#endif
|
|
||||||
|
|
||||||
void Debounce::notifyIfReady() {
|
|
||||||
if (!mRunning) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
// If we haven't seen an event in more than the maximum wait time, notify callbacks immediately
|
|
||||||
// to ensure that we don't wait forever. Otherwise, wait for the minimum wait time and batch
|
|
||||||
// subsequent fast changes. This also means the first file change in a batch is notified immediately,
|
|
||||||
// separately from the rest of the batch. This seems like an acceptable tradeoff if the common case
|
|
||||||
// is that only a single file was updated at a time.
|
|
||||||
auto time = std::chrono::steady_clock::now();
|
|
||||||
if ((time - mLastTime) > std::chrono::milliseconds(MAX_WAIT_TIME)) {
|
|
||||||
mLastTime = time;
|
|
||||||
notify();
|
|
||||||
} else {
|
|
||||||
wait();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
void Debounce::wait() {
|
|
||||||
#ifdef __wasm32__
|
|
||||||
clear_timeout(mTimeout);
|
|
||||||
mTimeout = set_timeout(MIN_WAIT_TIME, this);
|
|
||||||
#else
|
|
||||||
auto status = mWaitSignal.waitFor(std::chrono::milliseconds(MIN_WAIT_TIME));
|
|
||||||
if (mRunning && (status == std::cv_status::timeout)) {
|
|
||||||
notify();
|
|
||||||
}
|
|
||||||
#endif
|
|
||||||
}
|
|
||||||
|
|
||||||
void Debounce::notify() {
|
|
||||||
std::unique_lock<std::mutex> lock(mMutex);
|
|
||||||
|
|
||||||
mLastTime = std::chrono::steady_clock::now();
|
|
||||||
for (auto it = mCallbacks.begin(); it != mCallbacks.end(); it++) {
|
|
||||||
auto cb = it->second;
|
|
||||||
cb();
|
|
||||||
}
|
|
||||||
|
|
||||||
#ifndef __wasm32__
|
|
||||||
mWaitSignal.reset();
|
|
||||||
#endif
|
|
||||||
}
|
|
||||||
49
node_modules/@parcel/watcher/src/Debounce.hh
generated
vendored
49
node_modules/@parcel/watcher/src/Debounce.hh
generated
vendored
@ -1,49 +0,0 @@
|
|||||||
#ifndef DEBOUNCE_H
|
|
||||||
#define DEBOUNCE_H
|
|
||||||
|
|
||||||
#include <thread>
|
|
||||||
#include <unordered_map>
|
|
||||||
#include <functional>
|
|
||||||
#include "Signal.hh"
|
|
||||||
|
|
||||||
#define MIN_WAIT_TIME 50
|
|
||||||
#define MAX_WAIT_TIME 500
|
|
||||||
|
|
||||||
#ifdef __wasm32__
|
|
||||||
extern "C" {
|
|
||||||
int set_timeout(int ms, void *ctx);
|
|
||||||
void clear_timeout(int timeout);
|
|
||||||
void on_timeout(void *ctx);
|
|
||||||
};
|
|
||||||
#endif
|
|
||||||
|
|
||||||
class Debounce {
|
|
||||||
public:
|
|
||||||
static std::shared_ptr<Debounce> getShared();
|
|
||||||
|
|
||||||
Debounce();
|
|
||||||
~Debounce();
|
|
||||||
|
|
||||||
void add(void *key, std::function<void()> cb);
|
|
||||||
void remove(void *key);
|
|
||||||
void trigger();
|
|
||||||
void notify();
|
|
||||||
|
|
||||||
private:
|
|
||||||
bool mRunning;
|
|
||||||
std::mutex mMutex;
|
|
||||||
#ifdef __wasm32__
|
|
||||||
int mTimeout;
|
|
||||||
#else
|
|
||||||
Signal mWaitSignal;
|
|
||||||
std::thread mThread;
|
|
||||||
#endif
|
|
||||||
std::unordered_map<void *, std::function<void()>> mCallbacks;
|
|
||||||
std::chrono::time_point<std::chrono::steady_clock> mLastTime;
|
|
||||||
|
|
||||||
void loop();
|
|
||||||
void notifyIfReady();
|
|
||||||
void wait();
|
|
||||||
};
|
|
||||||
|
|
||||||
#endif
|
|
||||||
152
node_modules/@parcel/watcher/src/DirTree.cc
generated
vendored
152
node_modules/@parcel/watcher/src/DirTree.cc
generated
vendored
@ -1,152 +0,0 @@
|
|||||||
#include "DirTree.hh"
|
|
||||||
#include <inttypes.h>
|
|
||||||
|
|
||||||
static std::mutex mDirCacheMutex;
|
|
||||||
static std::unordered_map<std::string, std::weak_ptr<DirTree>> dirTreeCache;
|
|
||||||
|
|
||||||
struct DirTreeDeleter {
|
|
||||||
void operator()(DirTree *tree) {
|
|
||||||
std::lock_guard<std::mutex> lock(mDirCacheMutex);
|
|
||||||
dirTreeCache.erase(tree->root);
|
|
||||||
delete tree;
|
|
||||||
|
|
||||||
// Free up memory.
|
|
||||||
if (dirTreeCache.size() == 0) {
|
|
||||||
dirTreeCache.rehash(0);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
std::shared_ptr<DirTree> DirTree::getCached(std::string root) {
|
|
||||||
std::lock_guard<std::mutex> lock(mDirCacheMutex);
|
|
||||||
|
|
||||||
auto found = dirTreeCache.find(root);
|
|
||||||
std::shared_ptr<DirTree> tree;
|
|
||||||
|
|
||||||
// Use cached tree, or create an empty one.
|
|
||||||
if (found != dirTreeCache.end()) {
|
|
||||||
tree = found->second.lock();
|
|
||||||
} else {
|
|
||||||
tree = std::shared_ptr<DirTree>(new DirTree(root), DirTreeDeleter());
|
|
||||||
dirTreeCache.emplace(root, tree);
|
|
||||||
}
|
|
||||||
|
|
||||||
return tree;
|
|
||||||
}
|
|
||||||
|
|
||||||
DirTree::DirTree(std::string root, FILE *f) : root(root), isComplete(true) {
|
|
||||||
size_t size;
|
|
||||||
if (fscanf(f, "%zu", &size)) {
|
|
||||||
for (size_t i = 0; i < size; i++) {
|
|
||||||
DirEntry entry(f);
|
|
||||||
entries.emplace(entry.path, entry);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Internal find method that has no lock
|
|
||||||
DirEntry *DirTree::_find(std::string path) {
|
|
||||||
auto found = entries.find(path);
|
|
||||||
if (found == entries.end()) {
|
|
||||||
return NULL;
|
|
||||||
}
|
|
||||||
|
|
||||||
return &found->second;
|
|
||||||
}
|
|
||||||
|
|
||||||
DirEntry *DirTree::add(std::string path, uint64_t mtime, bool isDir) {
|
|
||||||
std::lock_guard<std::mutex> lock(mMutex);
|
|
||||||
|
|
||||||
DirEntry entry(path, mtime, isDir);
|
|
||||||
auto it = entries.emplace(entry.path, entry);
|
|
||||||
return &it.first->second;
|
|
||||||
}
|
|
||||||
|
|
||||||
DirEntry *DirTree::find(std::string path) {
|
|
||||||
std::lock_guard<std::mutex> lock(mMutex);
|
|
||||||
return _find(path);
|
|
||||||
}
|
|
||||||
|
|
||||||
DirEntry *DirTree::update(std::string path, uint64_t mtime) {
|
|
||||||
std::lock_guard<std::mutex> lock(mMutex);
|
|
||||||
|
|
||||||
DirEntry *found = _find(path);
|
|
||||||
if (found) {
|
|
||||||
found->mtime = mtime;
|
|
||||||
}
|
|
||||||
|
|
||||||
return found;
|
|
||||||
}
|
|
||||||
|
|
||||||
void DirTree::remove(std::string path) {
|
|
||||||
std::lock_guard<std::mutex> lock(mMutex);
|
|
||||||
|
|
||||||
DirEntry *found = _find(path);
|
|
||||||
|
|
||||||
// Remove all sub-entries if this is a directory
|
|
||||||
if (found && found->isDir) {
|
|
||||||
std::string pathStart = path + DIR_SEP;
|
|
||||||
for (auto it = entries.begin(); it != entries.end();) {
|
|
||||||
if (it->first.rfind(pathStart, 0) == 0) {
|
|
||||||
it = entries.erase(it);
|
|
||||||
} else {
|
|
||||||
it++;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
entries.erase(path);
|
|
||||||
}
|
|
||||||
|
|
||||||
void DirTree::write(FILE *f) {
|
|
||||||
std::lock_guard<std::mutex> lock(mMutex);
|
|
||||||
|
|
||||||
fprintf(f, "%zu\n", entries.size());
|
|
||||||
for (auto it = entries.begin(); it != entries.end(); it++) {
|
|
||||||
it->second.write(f);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
void DirTree::getChanges(DirTree *snapshot, EventList &events) {
|
|
||||||
std::lock_guard<std::mutex> lock(mMutex);
|
|
||||||
std::lock_guard<std::mutex> snapshotLock(snapshot->mMutex);
|
|
||||||
|
|
||||||
for (auto it = entries.begin(); it != entries.end(); it++) {
|
|
||||||
auto found = snapshot->entries.find(it->first);
|
|
||||||
if (found == snapshot->entries.end()) {
|
|
||||||
events.create(it->second.path);
|
|
||||||
} else if (found->second.mtime != it->second.mtime && !found->second.isDir && !it->second.isDir) {
|
|
||||||
events.update(it->second.path);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
for (auto it = snapshot->entries.begin(); it != snapshot->entries.end(); it++) {
|
|
||||||
size_t count = entries.count(it->first);
|
|
||||||
if (count == 0) {
|
|
||||||
events.remove(it->second.path);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
DirEntry::DirEntry(std::string p, uint64_t t, bool d) {
|
|
||||||
path = p;
|
|
||||||
mtime = t;
|
|
||||||
isDir = d;
|
|
||||||
state = NULL;
|
|
||||||
}
|
|
||||||
|
|
||||||
DirEntry::DirEntry(FILE *f) {
|
|
||||||
size_t size;
|
|
||||||
if (fscanf(f, "%zu", &size)) {
|
|
||||||
path.resize(size);
|
|
||||||
if (fread(&path[0], sizeof(char), size, f)) {
|
|
||||||
int d = 0;
|
|
||||||
fscanf(f, "%" PRIu64 " %d\n", &mtime, &d);
|
|
||||||
isDir = d == 1;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
void DirEntry::write(FILE *f) const {
|
|
||||||
fprintf(f, "%zu%s%" PRIu64 " %d\n", path.size(), path.c_str(), mtime, isDir);
|
|
||||||
}
|
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Loading…
x
Reference in New Issue
Block a user