toggle transparency

AnActualEmerald 2022-08-31 03:04:16 -04:00
parent 479c959b18
commit 15246f6cd3
Signed by: emerald
GPG Key ID: CC76D6B296CAC8B0
1670 changed files with 46 additions and 1560353 deletions

.gitignore vendored Normal file (2 lines changed)

@@ -0,0 +1,2 @@
node_modules/
package-lock.json

node_modules/.bin/esbuild generated vendored (1 line changed)

@@ -1 +0,0 @@
../esbuild/bin/esbuild

node_modules/.bin/mkdirp generated vendored (1 line changed)

@@ -1 +0,0 @@
../mkdirp/bin/cmd.js

node_modules/.bin/nanoid generated vendored (1 line changed)

@@ -1 +0,0 @@
../nanoid/bin/nanoid.cjs

node_modules/.bin/resolve generated vendored (1 line changed)

@@ -1 +0,0 @@
../resolve/bin/resolve

node_modules/.bin/rimraf generated vendored (1 line changed)

@@ -1 +0,0 @@
../rimraf/bin.js

node_modules/.bin/rollup generated vendored (1 line changed)

@@ -1 +0,0 @@
../rollup/dist/bin/rollup

node_modules/.bin/sorcery generated vendored (1 line changed)

@@ -1 +0,0 @@
../sorcery/bin/index.js

node_modules/.bin/svelte-check generated vendored (1 line changed)

@@ -1 +0,0 @@
../svelte-check/bin/svelte-check

node_modules/.bin/tauri generated vendored (1 line changed)

@@ -1 +0,0 @@
../@tauri-apps/cli/tauri.js

node_modules/.bin/tsc generated vendored (1 line changed)

@@ -1 +0,0 @@
../typescript/bin/tsc

node_modules/.bin/tsserver generated vendored (1 line changed)

@@ -1 +0,0 @@
../typescript/bin/tsserver

node_modules/.bin/vite generated vendored (1 line changed)

@@ -1 +0,0 @@
../vite/bin/vite.js

node_modules/.package-lock.json generated vendored (1181 lines changed)

File diff suppressed because it is too large

View File

@@ -1,133 +0,0 @@
// node_modules/@tauri-apps/api/tslib.es6-9bc0804d.js
var r = function() {
return (r = Object.assign || function(t) {
for (var n, r3 = 1, e = arguments.length; r3 < e; r3++)
for (var o3 in n = arguments[r3])
Object.prototype.hasOwnProperty.call(n, o3) && (t[o3] = n[o3]);
return t;
}).apply(this, arguments);
};
function o(t, n, r3, e) {
return new (r3 || (r3 = Promise))(function(o3, a2) {
function c2(t2) {
try {
i2(e.next(t2));
} catch (t3) {
a2(t3);
}
}
function l(t2) {
try {
i2(e.throw(t2));
} catch (t3) {
a2(t3);
}
}
function i2(t2) {
var n2;
t2.done ? o3(t2.value) : (n2 = t2.value, n2 instanceof r3 ? n2 : new r3(function(t3) {
t3(n2);
})).then(c2, l);
}
i2((e = e.apply(t, n || [])).next());
});
}
function a(t, n) {
var r3, e, o3, a2, c2 = { label: 0, sent: function() {
if (1 & o3[0])
throw o3[1];
return o3[1];
}, trys: [], ops: [] };
return a2 = { next: l(0), throw: l(1), return: l(2) }, "function" == typeof Symbol && (a2[Symbol.iterator] = function() {
return this;
}), a2;
function l(a3) {
return function(l2) {
return function(a4) {
if (r3)
throw new TypeError("Generator is already executing.");
for (; c2; )
try {
if (r3 = 1, e && (o3 = 2 & a4[0] ? e.return : a4[0] ? e.throw || ((o3 = e.return) && o3.call(e), 0) : e.next) && !(o3 = o3.call(e, a4[1])).done)
return o3;
switch (e = 0, o3 && (a4 = [2 & a4[0], o3.value]), a4[0]) {
case 0:
case 1:
o3 = a4;
break;
case 4:
return c2.label++, { value: a4[1], done: false };
case 5:
c2.label++, e = a4[1], a4 = [0];
continue;
case 7:
a4 = c2.ops.pop(), c2.trys.pop();
continue;
default:
if (!(o3 = c2.trys, (o3 = o3.length > 0 && o3[o3.length - 1]) || 6 !== a4[0] && 2 !== a4[0])) {
c2 = 0;
continue;
}
if (3 === a4[0] && (!o3 || a4[1] > o3[0] && a4[1] < o3[3])) {
c2.label = a4[1];
break;
}
if (6 === a4[0] && c2.label < o3[1]) {
c2.label = o3[1], o3 = a4;
break;
}
if (o3 && c2.label < o3[2]) {
c2.label = o3[2], c2.ops.push(a4);
break;
}
o3[2] && c2.ops.pop(), c2.trys.pop();
continue;
}
a4 = n.call(t, c2);
} catch (t2) {
a4 = [6, t2], e = 0;
} finally {
r3 = o3 = 0;
}
if (5 & a4[0])
throw a4[1];
return { value: a4[0] ? a4[1] : void 0, done: true };
}([a3, l2]);
};
}
}
// node_modules/@tauri-apps/api/tauri-fa8f44bd.js
function o2(n, t) {
void 0 === t && (t = false);
var e = window.crypto.getRandomValues(new Uint32Array(1))[0], o3 = "_".concat(e);
return Object.defineProperty(window, o3, { value: function(e2) {
return t && Reflect.deleteProperty(window, o3), null == n ? void 0 : n(e2);
}, writable: false, configurable: true }), e;
}
function r2(r3, c2) {
return void 0 === c2 && (c2 = {}), o(this, void 0, void 0, function() {
return a(this, function(n) {
return [2, new Promise(function(n2, t) {
var i2 = o2(function(t2) {
n2(t2), Reflect.deleteProperty(window, "_".concat(a2));
}, true), a2 = o2(function(n3) {
t(n3), Reflect.deleteProperty(window, "_".concat(i2));
}, true);
window.__TAURI_IPC__(r({ cmd: r3, callback: i2, error: a2 }, c2));
})];
});
});
}
function c(n, t) {
void 0 === t && (t = "asset");
var e = encodeURIComponent(n);
return navigator.userAgent.includes("Windows") ? "https://".concat(t, ".localhost/").concat(e) : "".concat(t, "://").concat(e);
}
var i = Object.freeze({ __proto__: null, transformCallback: o2, invoke: r2, convertFileSrc: c });
export {
c as convertFileSrc,
r2 as invoke,
o2 as transformCallback
};
//# sourceMappingURL=@tauri-apps_api_tauri.js.map

View File

@@ -1,7 +0,0 @@
{
"version": 3,
"sources": ["../../@tauri-apps/api/tslib.es6-9bc0804d.js", "../../@tauri-apps/api/tauri-fa8f44bd.js"],
"sourcesContent": ["var t=function(n,r){return(t=Object.setPrototypeOf||{__proto__:[]}instanceof Array&&function(t,n){t.__proto__=n}||function(t,n){for(var r in n)Object.prototype.hasOwnProperty.call(n,r)&&(t[r]=n[r])})(n,r)};function n(n,r){if(\"function\"!=typeof r&&null!==r)throw new TypeError(\"Class extends value \"+String(r)+\" is not a constructor or null\");function e(){this.constructor=n}t(n,r),n.prototype=null===r?Object.create(r):(e.prototype=r.prototype,new e)}var r=function(){return(r=Object.assign||function(t){for(var n,r=1,e=arguments.length;r<e;r++)for(var o in n=arguments[r])Object.prototype.hasOwnProperty.call(n,o)&&(t[o]=n[o]);return t}).apply(this,arguments)};function e(t,n){var r={};for(var e in t)Object.prototype.hasOwnProperty.call(t,e)&&n.indexOf(e)<0&&(r[e]=t[e]);if(null!=t&&\"function\"==typeof Object.getOwnPropertySymbols){var o=0;for(e=Object.getOwnPropertySymbols(t);o<e.length;o++)n.indexOf(e[o])<0&&Object.prototype.propertyIsEnumerable.call(t,e[o])&&(r[e[o]]=t[e[o]])}return r}function o(t,n,r,e){return new(r||(r=Promise))((function(o,a){function c(t){try{i(e.next(t))}catch(t){a(t)}}function l(t){try{i(e.throw(t))}catch(t){a(t)}}function i(t){var n;t.done?o(t.value):(n=t.value,n instanceof r?n:new r((function(t){t(n)}))).then(c,l)}i((e=e.apply(t,n||[])).next())}))}function a(t,n){var r,e,o,a,c={label:0,sent:function(){if(1&o[0])throw o[1];return o[1]},trys:[],ops:[]};return a={next:l(0),throw:l(1),return:l(2)},\"function\"==typeof Symbol&&(a[Symbol.iterator]=function(){return this}),a;function l(a){return function(l){return function(a){if(r)throw new TypeError(\"Generator is already executing.\");for(;c;)try{if(r=1,e&&(o=2&a[0]?e.return:a[0]?e.throw||((o=e.return)&&o.call(e),0):e.next)&&!(o=o.call(e,a[1])).done)return o;switch(e=0,o&&(a=[2&a[0],o.value]),a[0]){case 0:case 1:o=a;break;case 4:return c.label++,{value:a[1],done:!1};case 5:c.label++,e=a[1],a=[0];continue;case 7:a=c.ops.pop(),c.trys.pop();continue;default:if(!(o=c.trys,(o=o.length>0&&o[o.length-1])||6!==a[0]&&2!==a[0])){c=0;continue}if(3===a[0]&&(!o||a[1]>o[0]&&a[1]<o[3])){c.label=a[1];break}if(6===a[0]&&c.label<o[1]){c.label=o[1],o=a;break}if(o&&c.label<o[2]){c.label=o[2],c.ops.push(a);break}o[2]&&c.ops.pop(),c.trys.pop();continue}a=n.call(t,c)}catch(t){a=[6,t],e=0}finally{r=o=0}if(5&a[0])throw a[1];return{value:a[0]?a[1]:void 0,done:!0}}([a,l])}}}function c(t,n,r){if(r||2===arguments.length)for(var e,o=0,a=n.length;o<a;o++)!e&&o in n||(e||(e=Array.prototype.slice.call(n,0,o)),e[o]=n[o]);return t.concat(e||Array.prototype.slice.call(n))}export{o as _,a,r as b,n as c,e as d,c as e};\n", "import{_ as n,a as t,b as e}from\"./tslib.es6-9bc0804d.js\";function o(n,t){void 0===t&&(t=!1);var e=window.crypto.getRandomValues(new Uint32Array(1))[0],o=\"_\".concat(e);return Object.defineProperty(window,o,{value:function(e){return t&&Reflect.deleteProperty(window,o),null==n?void 0:n(e)},writable:!1,configurable:!0}),e}function r(r,c){return void 0===c&&(c={}),n(this,void 0,void 0,(function(){return t(this,(function(n){return[2,new Promise((function(n,t){var i=o((function(t){n(t),Reflect.deleteProperty(window,\"_\".concat(a))}),!0),a=o((function(n){t(n),Reflect.deleteProperty(window,\"_\".concat(i))}),!0);window.__TAURI_IPC__(e({cmd:r,callback:i,error:a},c))}))]}))}))}function c(n,t){void 0===t&&(t=\"asset\");var e=encodeURIComponent(n);return navigator.userAgent.includes(\"Windows\")?\"https://\".concat(t,\".localhost/\").concat(e):\"\".concat(t,\"://\").concat(e)}var 
i=Object.freeze({__proto__:null,transformCallback:o,invoke:r,convertFileSrc:c});export{i as a,c,r as i,o as t};\n"],
"mappings": ";AAAmc,IAAI,IAAE,WAAU;AAAC,UAAO,IAAE,OAAO,UAAQ,SAAS,GAAE;AAAC,aAAQ,GAAEA,KAAE,GAAE,IAAE,UAAU,QAAOA,KAAE,GAAEA;AAAI,eAAQC,MAAK,IAAE,UAAUD;AAAG,eAAO,UAAU,eAAe,KAAK,GAAEC,EAAC,MAAI,EAAEA,MAAG,EAAEA;AAAI,WAAO;AAAA,EAAC,GAAG,MAAM,MAAK,SAAS;AAAC;AAA6U,SAAS,EAAE,GAAE,GAAEC,IAAE,GAAE;AAAC,SAAO,KAAIA,OAAIA,KAAE,UAAW,SAASC,IAAEC,IAAE;AAAC,aAASC,GAAEC,IAAE;AAAC,UAAG;AAAC,QAAAC,GAAE,EAAE,KAAKD,EAAC,CAAC;AAAA,MAAC,SAAOA,IAAN;AAAS,QAAAF,GAAEE,EAAC;AAAA,MAAC;AAAA,IAAC;AAAC,aAAS,EAAEA,IAAE;AAAC,UAAG;AAAC,QAAAC,GAAE,EAAE,MAAMD,EAAC,CAAC;AAAA,MAAC,SAAOA,IAAN;AAAS,QAAAF,GAAEE,EAAC;AAAA,MAAC;AAAA,IAAC;AAAC,aAASC,GAAED,IAAE;AAAC,UAAIE;AAAE,MAAAF,GAAE,OAAKH,GAAEG,GAAE,KAAK,KAAGE,KAAEF,GAAE,OAAME,cAAaN,KAAEM,KAAE,IAAIN,GAAG,SAASI,IAAE;AAAC,QAAAA,GAAEE,EAAC;AAAA,MAAC,CAAE,GAAG,KAAKH,IAAE,CAAC;AAAA,IAAC;AAAC,IAAAE,IAAG,IAAE,EAAE,MAAM,GAAE,KAAG,CAAC,CAAC,GAAG,KAAK,CAAC;AAAA,EAAC,CAAE;AAAC;AAAC,SAAS,EAAE,GAAE,GAAE;AAAC,MAAIL,IAAE,GAAEC,IAAEC,IAAEC,KAAE,EAAC,OAAM,GAAE,MAAK,WAAU;AAAC,QAAG,IAAEF,GAAE;AAAG,YAAMA,GAAE;AAAG,WAAOA,GAAE;AAAA,EAAE,GAAE,MAAK,CAAC,GAAE,KAAI,CAAC,EAAC;AAAE,SAAOC,KAAE,EAAC,MAAK,EAAE,CAAC,GAAE,OAAM,EAAE,CAAC,GAAE,QAAO,EAAE,CAAC,EAAC,GAAE,cAAY,OAAO,WAASA,GAAE,OAAO,YAAU,WAAU;AAAC,WAAO;AAAA,EAAI,IAAGA;AAAE,WAAS,EAAEA,IAAE;AAAC,WAAO,SAASK,IAAE;AAAC,aAAO,SAASL,IAAE;AAAC,YAAGF;AAAE,gBAAM,IAAI,UAAU,iCAAiC;AAAE,eAAKG;AAAG,cAAG;AAAC,gBAAGH,KAAE,GAAE,MAAIC,KAAE,IAAEC,GAAE,KAAG,EAAE,SAAOA,GAAE,KAAG,EAAE,WAASD,KAAE,EAAE,WAASA,GAAE,KAAK,CAAC,GAAE,KAAG,EAAE,SAAO,EAAEA,KAAEA,GAAE,KAAK,GAAEC,GAAE,EAAE,GAAG;AAAK,qBAAOD;AAAE,oBAAO,IAAE,GAAEA,OAAIC,KAAE,CAAC,IAAEA,GAAE,IAAGD,GAAE,KAAK,IAAGC,GAAE;AAAA,mBAAS;AAAA,mBAAO;AAAE,gBAAAD,KAAEC;AAAE;AAAA,mBAAW;AAAE,uBAAOC,GAAE,SAAQ,EAAC,OAAMD,GAAE,IAAG,MAAK,MAAE;AAAA,mBAAO;AAAE,gBAAAC,GAAE,SAAQ,IAAED,GAAE,IAAGA,KAAE,CAAC,CAAC;AAAE;AAAA,mBAAc;AAAE,gBAAAA,KAAEC,GAAE,IAAI,IAAI,GAAEA,GAAE,KAAK,IAAI;AAAE;AAAA;AAAiB,oBAAG,EAAEF,KAAEE,GAAE,OAAMF,KAAEA,GAAE,SAAO,KAAGA,GAAEA,GAAE,SAAO,OAAK,MAAIC,GAAE,MAAI,MAAIA,GAAE,KAAI;AAAC,kBAAAC,KAAE;AAAE;AAAA,gBAAQ;AAAC,oBAAG,MAAID,GAAE,OAAK,CAACD,MAAGC,GAAE,KAAGD,GAAE,MAAIC,GAAE,KAAGD,GAAE,KAAI;AAAC,kBAAAE,GAAE,QAAMD,GAAE;AAAG;AAAA,gBAAK;AAAC,oBAAG,MAAIA,GAAE,MAAIC,GAAE,QAAMF,GAAE,IAAG;AAAC,kBAAAE,GAAE,QAAMF,GAAE,IAAGA,KAAEC;AAAE;AAAA,gBAAK;AAAC,oBAAGD,MAAGE,GAAE,QAAMF,GAAE,IAAG;AAAC,kBAAAE,GAAE,QAAMF,GAAE,IAAGE,GAAE,IAAI,KAAKD,EAAC;AAAE;AAAA,gBAAK;AAAC,gBAAAD,GAAE,MAAIE,GAAE,IAAI,IAAI,GAAEA,GAAE,KAAK,IAAI;AAAE;AAAA;AAAS,YAAAD,KAAE,EAAE,KAAK,GAAEC,EAAC;AAAA,UAAC,SAAOC,IAAN;AAAS,YAAAF,KAAE,CAAC,GAAEE,EAAC,GAAE,IAAE;AAAA,UAAC,UAAC;AAAQ,YAAAJ,KAAEC,KAAE;AAAA,UAAC;AAAC,YAAG,IAAEC,GAAE;AAAG,gBAAMA,GAAE;AAAG,eAAM,EAAC,OAAMA,GAAE,KAAGA,GAAE,KAAG,QAAO,MAAK,KAAE;AAAA,MAAC,EAAE,CAACA,IAAEK,EAAC,CAAC;AAAA,IAAC;AAAA,EAAC;AAAC;;;ACAjvE,SAASC,GAAE,GAAE,GAAE;AAAC,aAAS,MAAI,IAAE;AAAI,MAAI,IAAE,OAAO,OAAO,gBAAgB,IAAI,YAAY,CAAC,CAAC,EAAE,IAAGA,KAAE,IAAI,OAAO,CAAC;AAAE,SAAO,OAAO,eAAe,QAAOA,IAAE,EAAC,OAAM,SAASC,IAAE;AAAC,WAAO,KAAG,QAAQ,eAAe,QAAOD,EAAC,GAAE,QAAM,IAAE,SAAO,EAAEC,EAAC;AAAA,EAAC,GAAE,UAAS,OAAG,cAAa,KAAE,CAAC,GAAE;AAAC;AAAC,SAASC,GAAEA,IAAEC,IAAE;AAAC,SAAO,WAASA,OAAIA,KAAE,CAAC,IAAG,EAAE,MAAK,QAAO,QAAQ,WAAU;AAAC,WAAO,EAAE,MAAM,SAAS,GAAE;AAAC,aAAM,CAAC,GAAE,IAAI,QAAS,SAASC,IAAE,GAAE;AAAC,YAAIC,KAAEL,GAAG,SAASM,IAAE;AAAC,UAAAF,GAAEE,EAAC,GAAE,QAAQ,eAAe,QAAO,IAAI,OAAOC,EAAC,CAAC;AAAA,QAAC,GAAG,IAAE,GAAEA,KAAEP,GAAG,SAASI,IAAE;AAAC,YAAEA,EAAC,GAAE,QAAQ,eAAe,QAAO,IAAI,OAAOC,EAAC,CAAC;AAAA,QAAC,GAAG,IAAE;AAAE,eAAO,cAAc,EAAE,EAAC,KAAIH,IAAE,UAASG,IAAE,OAAME,GAAC,GAAEJ,EAAC,CAAC;AAAA,MAAC,CAAE,CAAC;AAAA,IAAC,CAAE;AAAA,EAAC,CAAE;AAAC;AAAC,SAAS,EAAE,G
AAE,GAAE;AAAC,aAAS,MAAI,IAAE;AAAS,MAAI,IAAE,mBAAmB,CAAC;AAAE,SAAO,UAAU,UAAU,SAAS,SAAS,IAAE,WAAW,OAAO,GAAE,aAAa,EAAE,OAAO,CAAC,IAAE,GAAG,OAAO,GAAE,KAAK,EAAE,OAAO,CAAC;AAAC;AAAC,IAAI,IAAE,OAAO,OAAO,EAAC,WAAU,MAAK,mBAAkBH,IAAE,QAAOE,IAAE,gBAAe,EAAC,CAAC;",
"names": ["r", "o", "r", "o", "a", "c", "t", "i", "n", "l", "o", "e", "r", "c", "n", "i", "t", "a"]
}

View File

@@ -1,65 +0,0 @@
{
"hash": "b42739be",
"browserHash": "6908cbf3",
"optimized": {
"svelte/animate": {
"src": "../../svelte/animate/index.mjs",
"file": "svelte_animate.js",
"fileHash": "85b1f805",
"needsInterop": false
},
"svelte/easing": {
"src": "../../svelte/easing/index.mjs",
"file": "svelte_easing.js",
"fileHash": "5a53a669",
"needsInterop": false
},
"svelte/internal": {
"src": "../../svelte/internal/index.mjs",
"file": "svelte_internal.js",
"fileHash": "ff62f3ec",
"needsInterop": false
},
"svelte/motion": {
"src": "../../svelte/motion/index.mjs",
"file": "svelte_motion.js",
"fileHash": "40b7d0be",
"needsInterop": false
},
"svelte/store": {
"src": "../../svelte/store/index.mjs",
"file": "svelte_store.js",
"fileHash": "73b9b55b",
"needsInterop": false
},
"svelte/transition": {
"src": "../../svelte/transition/index.mjs",
"file": "svelte_transition.js",
"fileHash": "2b5dc27a",
"needsInterop": false
},
"svelte": {
"src": "../../svelte/index.mjs",
"file": "svelte.js",
"fileHash": "4a00aa3b",
"needsInterop": false
},
"@tauri-apps/api/tauri": {
"src": "../../@tauri-apps/api/tauri.js",
"file": "@tauri-apps_api_tauri.js",
"fileHash": "bd5f75d1",
"needsInterop": false
}
},
"chunks": {
"chunk-Y5HQ3MVM": {
"file": "chunk-Y5HQ3MVM.js"
},
"chunk-RX5AHRTA": {
"file": "chunk-RX5AHRTA.js"
},
"chunk-ZQ7TCJTX": {
"file": "chunk-ZQ7TCJTX.js"
}
}
}

View File

@@ -1,150 +0,0 @@
// node_modules/svelte/easing/index.mjs
function backInOut(t) {
const s = 1.70158 * 1.525;
if ((t *= 2) < 1)
return 0.5 * (t * t * ((s + 1) * t - s));
return 0.5 * ((t -= 2) * t * ((s + 1) * t + s) + 2);
}
function backIn(t) {
const s = 1.70158;
return t * t * ((s + 1) * t - s);
}
function backOut(t) {
const s = 1.70158;
return --t * t * ((s + 1) * t + s) + 1;
}
function bounceOut(t) {
const a = 4 / 11;
const b = 8 / 11;
const c = 9 / 10;
const ca = 4356 / 361;
const cb = 35442 / 1805;
const cc = 16061 / 1805;
const t2 = t * t;
return t < a ? 7.5625 * t2 : t < b ? 9.075 * t2 - 9.9 * t + 3.4 : t < c ? ca * t2 - cb * t + cc : 10.8 * t * t - 20.52 * t + 10.72;
}
function bounceInOut(t) {
return t < 0.5 ? 0.5 * (1 - bounceOut(1 - t * 2)) : 0.5 * bounceOut(t * 2 - 1) + 0.5;
}
function bounceIn(t) {
return 1 - bounceOut(1 - t);
}
function circInOut(t) {
if ((t *= 2) < 1)
return -0.5 * (Math.sqrt(1 - t * t) - 1);
return 0.5 * (Math.sqrt(1 - (t -= 2) * t) + 1);
}
function circIn(t) {
return 1 - Math.sqrt(1 - t * t);
}
function circOut(t) {
return Math.sqrt(1 - --t * t);
}
function cubicInOut(t) {
return t < 0.5 ? 4 * t * t * t : 0.5 * Math.pow(2 * t - 2, 3) + 1;
}
function cubicIn(t) {
return t * t * t;
}
function cubicOut(t) {
const f = t - 1;
return f * f * f + 1;
}
function elasticInOut(t) {
return t < 0.5 ? 0.5 * Math.sin(13 * Math.PI / 2 * 2 * t) * Math.pow(2, 10 * (2 * t - 1)) : 0.5 * Math.sin(-13 * Math.PI / 2 * (2 * t - 1 + 1)) * Math.pow(2, -10 * (2 * t - 1)) + 1;
}
function elasticIn(t) {
return Math.sin(13 * t * Math.PI / 2) * Math.pow(2, 10 * (t - 1));
}
function elasticOut(t) {
return Math.sin(-13 * (t + 1) * Math.PI / 2) * Math.pow(2, -10 * t) + 1;
}
function expoInOut(t) {
return t === 0 || t === 1 ? t : t < 0.5 ? 0.5 * Math.pow(2, 20 * t - 10) : -0.5 * Math.pow(2, 10 - t * 20) + 1;
}
function expoIn(t) {
return t === 0 ? t : Math.pow(2, 10 * (t - 1));
}
function expoOut(t) {
return t === 1 ? t : 1 - Math.pow(2, -10 * t);
}
function quadInOut(t) {
t /= 0.5;
if (t < 1)
return 0.5 * t * t;
t--;
return -0.5 * (t * (t - 2) - 1);
}
function quadIn(t) {
return t * t;
}
function quadOut(t) {
return -t * (t - 2);
}
function quartInOut(t) {
return t < 0.5 ? 8 * Math.pow(t, 4) : -8 * Math.pow(t - 1, 4) + 1;
}
function quartIn(t) {
return Math.pow(t, 4);
}
function quartOut(t) {
return Math.pow(t - 1, 3) * (1 - t) + 1;
}
function quintInOut(t) {
if ((t *= 2) < 1)
return 0.5 * t * t * t * t * t;
return 0.5 * ((t -= 2) * t * t * t * t + 2);
}
function quintIn(t) {
return t * t * t * t * t;
}
function quintOut(t) {
return --t * t * t * t * t + 1;
}
function sineInOut(t) {
return -0.5 * (Math.cos(Math.PI * t) - 1);
}
function sineIn(t) {
const v = Math.cos(t * Math.PI * 0.5);
if (Math.abs(v) < 1e-14)
return 1;
else
return 1 - v;
}
function sineOut(t) {
return Math.sin(t * Math.PI / 2);
}
export {
backInOut,
backIn,
backOut,
bounceOut,
bounceInOut,
bounceIn,
circInOut,
circIn,
circOut,
cubicInOut,
cubicIn,
cubicOut,
elasticInOut,
elasticIn,
elasticOut,
expoInOut,
expoIn,
expoOut,
quadInOut,
quadIn,
quadOut,
quartInOut,
quartIn,
quartOut,
quintInOut,
quintIn,
quintOut,
sineInOut,
sineIn,
sineOut
};
//# sourceMappingURL=chunk-RX5AHRTA.js.map

View File

@@ -1,7 +0,0 @@
{
"version": 3,
"sources": ["../../svelte/easing/index.mjs"],
"sourcesContent": ["export { identity as linear } from '../internal/index.mjs';\n\n/*\nAdapted from https://github.com/mattdesl\nDistributed under MIT License https://github.com/mattdesl/eases/blob/master/LICENSE.md\n*/\nfunction backInOut(t) {\n const s = 1.70158 * 1.525;\n if ((t *= 2) < 1)\n return 0.5 * (t * t * ((s + 1) * t - s));\n return 0.5 * ((t -= 2) * t * ((s + 1) * t + s) + 2);\n}\nfunction backIn(t) {\n const s = 1.70158;\n return t * t * ((s + 1) * t - s);\n}\nfunction backOut(t) {\n const s = 1.70158;\n return --t * t * ((s + 1) * t + s) + 1;\n}\nfunction bounceOut(t) {\n const a = 4.0 / 11.0;\n const b = 8.0 / 11.0;\n const c = 9.0 / 10.0;\n const ca = 4356.0 / 361.0;\n const cb = 35442.0 / 1805.0;\n const cc = 16061.0 / 1805.0;\n const t2 = t * t;\n return t < a\n ? 7.5625 * t2\n : t < b\n ? 9.075 * t2 - 9.9 * t + 3.4\n : t < c\n ? ca * t2 - cb * t + cc\n : 10.8 * t * t - 20.52 * t + 10.72;\n}\nfunction bounceInOut(t) {\n return t < 0.5\n ? 0.5 * (1.0 - bounceOut(1.0 - t * 2.0))\n : 0.5 * bounceOut(t * 2.0 - 1.0) + 0.5;\n}\nfunction bounceIn(t) {\n return 1.0 - bounceOut(1.0 - t);\n}\nfunction circInOut(t) {\n if ((t *= 2) < 1)\n return -0.5 * (Math.sqrt(1 - t * t) - 1);\n return 0.5 * (Math.sqrt(1 - (t -= 2) * t) + 1);\n}\nfunction circIn(t) {\n return 1.0 - Math.sqrt(1.0 - t * t);\n}\nfunction circOut(t) {\n return Math.sqrt(1 - --t * t);\n}\nfunction cubicInOut(t) {\n return t < 0.5 ? 4.0 * t * t * t : 0.5 * Math.pow(2.0 * t - 2.0, 3.0) + 1.0;\n}\nfunction cubicIn(t) {\n return t * t * t;\n}\nfunction cubicOut(t) {\n const f = t - 1.0;\n return f * f * f + 1.0;\n}\nfunction elasticInOut(t) {\n return t < 0.5\n ? 0.5 *\n Math.sin(((+13.0 * Math.PI) / 2) * 2.0 * t) *\n Math.pow(2.0, 10.0 * (2.0 * t - 1.0))\n : 0.5 *\n Math.sin(((-13.0 * Math.PI) / 2) * (2.0 * t - 1.0 + 1.0)) *\n Math.pow(2.0, -10.0 * (2.0 * t - 1.0)) +\n 1.0;\n}\nfunction elasticIn(t) {\n return Math.sin((13.0 * t * Math.PI) / 2) * Math.pow(2.0, 10.0 * (t - 1.0));\n}\nfunction elasticOut(t) {\n return (Math.sin((-13.0 * (t + 1.0) * Math.PI) / 2) * Math.pow(2.0, -10.0 * t) + 1.0);\n}\nfunction expoInOut(t) {\n return t === 0.0 || t === 1.0\n ? t\n : t < 0.5\n ? +0.5 * Math.pow(2.0, 20.0 * t - 10.0)\n : -0.5 * Math.pow(2.0, 10.0 - t * 20.0) + 1.0;\n}\nfunction expoIn(t) {\n return t === 0.0 ? t : Math.pow(2.0, 10.0 * (t - 1.0));\n}\nfunction expoOut(t) {\n return t === 1.0 ? t : 1.0 - Math.pow(2.0, -10.0 * t);\n}\nfunction quadInOut(t) {\n t /= 0.5;\n if (t < 1)\n return 0.5 * t * t;\n t--;\n return -0.5 * (t * (t - 2) - 1);\n}\nfunction quadIn(t) {\n return t * t;\n}\nfunction quadOut(t) {\n return -t * (t - 2.0);\n}\nfunction quartInOut(t) {\n return t < 0.5\n ? 
+8.0 * Math.pow(t, 4.0)\n : -8.0 * Math.pow(t - 1.0, 4.0) + 1.0;\n}\nfunction quartIn(t) {\n return Math.pow(t, 4.0);\n}\nfunction quartOut(t) {\n return Math.pow(t - 1.0, 3.0) * (1.0 - t) + 1.0;\n}\nfunction quintInOut(t) {\n if ((t *= 2) < 1)\n return 0.5 * t * t * t * t * t;\n return 0.5 * ((t -= 2) * t * t * t * t + 2);\n}\nfunction quintIn(t) {\n return t * t * t * t * t;\n}\nfunction quintOut(t) {\n return --t * t * t * t * t + 1;\n}\nfunction sineInOut(t) {\n return -0.5 * (Math.cos(Math.PI * t) - 1);\n}\nfunction sineIn(t) {\n const v = Math.cos(t * Math.PI * 0.5);\n if (Math.abs(v) < 1e-14)\n return 1;\n else\n return 1 - v;\n}\nfunction sineOut(t) {\n return Math.sin((t * Math.PI) / 2);\n}\n\nexport { backIn, backInOut, backOut, bounceIn, bounceInOut, bounceOut, circIn, circInOut, circOut, cubicIn, cubicInOut, cubicOut, elasticIn, elasticInOut, elasticOut, expoIn, expoInOut, expoOut, quadIn, quadInOut, quadOut, quartIn, quartInOut, quartOut, quintIn, quintInOut, quintOut, sineIn, sineInOut, sineOut };\n"],
"mappings": ";AAMA,SAAS,UAAU,GAAG;AAClB,QAAM,IAAI,UAAU;AACpB,OAAK,KAAK,KAAK;AACX,WAAO,OAAO,IAAI,MAAM,IAAI,KAAK,IAAI;AACzC,SAAO,QAAQ,KAAK,KAAK,MAAM,IAAI,KAAK,IAAI,KAAK;AACrD;AACA,SAAS,OAAO,GAAG;AACf,QAAM,IAAI;AACV,SAAO,IAAI,MAAM,IAAI,KAAK,IAAI;AAClC;AACA,SAAS,QAAQ,GAAG;AAChB,QAAM,IAAI;AACV,SAAO,EAAE,IAAI,MAAM,IAAI,KAAK,IAAI,KAAK;AACzC;AACA,SAAS,UAAU,GAAG;AAClB,QAAM,IAAI,IAAM;AAChB,QAAM,IAAI,IAAM;AAChB,QAAM,IAAI,IAAM;AAChB,QAAM,KAAK,OAAS;AACpB,QAAM,KAAK,QAAU;AACrB,QAAM,KAAK,QAAU;AACrB,QAAM,KAAK,IAAI;AACf,SAAO,IAAI,IACL,SAAS,KACT,IAAI,IACA,QAAQ,KAAK,MAAM,IAAI,MACvB,IAAI,IACA,KAAK,KAAK,KAAK,IAAI,KACnB,OAAO,IAAI,IAAI,QAAQ,IAAI;AAC7C;AACA,SAAS,YAAY,GAAG;AACpB,SAAO,IAAI,MACL,OAAO,IAAM,UAAU,IAAM,IAAI,CAAG,KACpC,MAAM,UAAU,IAAI,IAAM,CAAG,IAAI;AAC3C;AACA,SAAS,SAAS,GAAG;AACjB,SAAO,IAAM,UAAU,IAAM,CAAC;AAClC;AACA,SAAS,UAAU,GAAG;AAClB,OAAK,KAAK,KAAK;AACX,WAAO,QAAQ,KAAK,KAAK,IAAI,IAAI,CAAC,IAAI;AAC1C,SAAO,OAAO,KAAK,KAAK,KAAK,KAAK,KAAK,CAAC,IAAI;AAChD;AACA,SAAS,OAAO,GAAG;AACf,SAAO,IAAM,KAAK,KAAK,IAAM,IAAI,CAAC;AACtC;AACA,SAAS,QAAQ,GAAG;AAChB,SAAO,KAAK,KAAK,IAAI,EAAE,IAAI,CAAC;AAChC;AACA,SAAS,WAAW,GAAG;AACnB,SAAO,IAAI,MAAM,IAAM,IAAI,IAAI,IAAI,MAAM,KAAK,IAAI,IAAM,IAAI,GAAK,CAAG,IAAI;AAC5E;AACA,SAAS,QAAQ,GAAG;AAChB,SAAO,IAAI,IAAI;AACnB;AACA,SAAS,SAAS,GAAG;AACjB,QAAM,IAAI,IAAI;AACd,SAAO,IAAI,IAAI,IAAI;AACvB;AACA,SAAS,aAAa,GAAG;AACrB,SAAO,IAAI,MACL,MACE,KAAK,IAAM,KAAQ,KAAK,KAAM,IAAK,IAAM,CAAC,IAC1C,KAAK,IAAI,GAAK,MAAQ,IAAM,IAAI,EAAI,IACtC,MACE,KAAK,IAAM,MAAQ,KAAK,KAAM,KAAM,IAAM,IAAI,IAAM,EAAI,IACxD,KAAK,IAAI,GAAK,OAAS,IAAM,IAAI,EAAI,IACrC;AACZ;AACA,SAAS,UAAU,GAAG;AAClB,SAAO,KAAK,IAAK,KAAO,IAAI,KAAK,KAAM,CAAC,IAAI,KAAK,IAAI,GAAK,MAAQ,IAAI,EAAI;AAC9E;AACA,SAAS,WAAW,GAAG;AACnB,SAAQ,KAAK,IAAK,OAAS,IAAI,KAAO,KAAK,KAAM,CAAC,IAAI,KAAK,IAAI,GAAK,MAAQ,CAAC,IAAI;AACrF;AACA,SAAS,UAAU,GAAG;AAClB,SAAO,MAAM,KAAO,MAAM,IACpB,IACA,IAAI,MACA,MAAO,KAAK,IAAI,GAAK,KAAO,IAAI,EAAI,IACpC,OAAO,KAAK,IAAI,GAAK,KAAO,IAAI,EAAI,IAAI;AACtD;AACA,SAAS,OAAO,GAAG;AACf,SAAO,MAAM,IAAM,IAAI,KAAK,IAAI,GAAK,MAAQ,IAAI,EAAI;AACzD;AACA,SAAS,QAAQ,GAAG;AAChB,SAAO,MAAM,IAAM,IAAI,IAAM,KAAK,IAAI,GAAK,MAAQ,CAAC;AACxD;AACA,SAAS,UAAU,GAAG;AAClB,OAAK;AACL,MAAI,IAAI;AACJ,WAAO,MAAM,IAAI;AACrB;AACA,SAAO,QAAQ,KAAK,IAAI,KAAK;AACjC;AACA,SAAS,OAAO,GAAG;AACf,SAAO,IAAI;AACf;AACA,SAAS,QAAQ,GAAG;AAChB,SAAO,CAAC,KAAK,IAAI;AACrB;AACA,SAAS,WAAW,GAAG;AACnB,SAAO,IAAI,MACL,IAAO,KAAK,IAAI,GAAG,CAAG,IACtB,KAAO,KAAK,IAAI,IAAI,GAAK,CAAG,IAAI;AAC1C;AACA,SAAS,QAAQ,GAAG;AAChB,SAAO,KAAK,IAAI,GAAG,CAAG;AAC1B;AACA,SAAS,SAAS,GAAG;AACjB,SAAO,KAAK,IAAI,IAAI,GAAK,CAAG,KAAK,IAAM,KAAK;AAChD;AACA,SAAS,WAAW,GAAG;AACnB,OAAK,KAAK,KAAK;AACX,WAAO,MAAM,IAAI,IAAI,IAAI,IAAI;AACjC,SAAO,QAAQ,KAAK,KAAK,IAAI,IAAI,IAAI,IAAI;AAC7C;AACA,SAAS,QAAQ,GAAG;AAChB,SAAO,IAAI,IAAI,IAAI,IAAI;AAC3B;AACA,SAAS,SAAS,GAAG;AACjB,SAAO,EAAE,IAAI,IAAI,IAAI,IAAI,IAAI;AACjC;AACA,SAAS,UAAU,GAAG;AAClB,SAAO,QAAQ,KAAK,IAAI,KAAK,KAAK,CAAC,IAAI;AAC3C;AACA,SAAS,OAAO,GAAG;AACf,QAAM,IAAI,KAAK,IAAI,IAAI,KAAK,KAAK,GAAG;AACpC,MAAI,KAAK,IAAI,CAAC,IAAI;AACd,WAAO;AAAA;AAEP,WAAO,IAAI;AACnB;AACA,SAAS,QAAQ,GAAG;AAChB,SAAO,KAAK,IAAK,IAAI,KAAK,KAAM,CAAC;AACrC;",
"names": []
}

View File

@@ -1,101 +0,0 @@
import {
is_function,
noop,
run_all,
safe_not_equal,
subscribe
} from "./chunk-ZQ7TCJTX.js";
// node_modules/svelte/store/index.mjs
var subscriber_queue = [];
function readable(value, start) {
return {
subscribe: writable(value, start).subscribe
};
}
function writable(value, start = noop) {
let stop;
const subscribers = /* @__PURE__ */ new Set();
function set(new_value) {
if (safe_not_equal(value, new_value)) {
value = new_value;
if (stop) {
const run_queue = !subscriber_queue.length;
for (const subscriber of subscribers) {
subscriber[1]();
subscriber_queue.push(subscriber, value);
}
if (run_queue) {
for (let i = 0; i < subscriber_queue.length; i += 2) {
subscriber_queue[i][0](subscriber_queue[i + 1]);
}
subscriber_queue.length = 0;
}
}
}
}
function update(fn) {
set(fn(value));
}
function subscribe2(run, invalidate = noop) {
const subscriber = [run, invalidate];
subscribers.add(subscriber);
if (subscribers.size === 1) {
stop = start(set) || noop;
}
run(value);
return () => {
subscribers.delete(subscriber);
if (subscribers.size === 0) {
stop();
stop = null;
}
};
}
return { set, update, subscribe: subscribe2 };
}
function derived(stores, fn, initial_value) {
const single = !Array.isArray(stores);
const stores_array = single ? [stores] : stores;
const auto = fn.length < 2;
return readable(initial_value, (set) => {
let inited = false;
const values = [];
let pending = 0;
let cleanup = noop;
const sync = () => {
if (pending) {
return;
}
cleanup();
const result = fn(single ? values[0] : values, set);
if (auto) {
set(result);
} else {
cleanup = is_function(result) ? result : noop;
}
};
const unsubscribers = stores_array.map((store, i) => subscribe(store, (value) => {
values[i] = value;
pending &= ~(1 << i);
if (inited) {
sync();
}
}, () => {
pending |= 1 << i;
}));
inited = true;
sync();
return function stop() {
run_all(unsubscribers);
cleanup();
};
});
}
export {
readable,
writable,
derived
};
//# sourceMappingURL=chunk-Y5HQ3MVM.js.map

View File

@@ -1,7 +0,0 @@
{
"version": 3,
"sources": ["../../svelte/store/index.mjs"],
"sourcesContent": ["import { noop, safe_not_equal, subscribe, run_all, is_function } from '../internal/index.mjs';\nexport { get_store_value as get } from '../internal/index.mjs';\n\nconst subscriber_queue = [];\n/**\n * Creates a `Readable` store that allows reading by subscription.\n * @param value initial value\n * @param {StartStopNotifier}start start and stop notifications for subscriptions\n */\nfunction readable(value, start) {\n return {\n subscribe: writable(value, start).subscribe\n };\n}\n/**\n * Create a `Writable` store that allows both updating and reading by subscription.\n * @param {*=}value initial value\n * @param {StartStopNotifier=}start start and stop notifications for subscriptions\n */\nfunction writable(value, start = noop) {\n let stop;\n const subscribers = new Set();\n function set(new_value) {\n if (safe_not_equal(value, new_value)) {\n value = new_value;\n if (stop) { // store is ready\n const run_queue = !subscriber_queue.length;\n for (const subscriber of subscribers) {\n subscriber[1]();\n subscriber_queue.push(subscriber, value);\n }\n if (run_queue) {\n for (let i = 0; i < subscriber_queue.length; i += 2) {\n subscriber_queue[i][0](subscriber_queue[i + 1]);\n }\n subscriber_queue.length = 0;\n }\n }\n }\n }\n function update(fn) {\n set(fn(value));\n }\n function subscribe(run, invalidate = noop) {\n const subscriber = [run, invalidate];\n subscribers.add(subscriber);\n if (subscribers.size === 1) {\n stop = start(set) || noop;\n }\n run(value);\n return () => {\n subscribers.delete(subscriber);\n if (subscribers.size === 0) {\n stop();\n stop = null;\n }\n };\n }\n return { set, update, subscribe };\n}\nfunction derived(stores, fn, initial_value) {\n const single = !Array.isArray(stores);\n const stores_array = single\n ? [stores]\n : stores;\n const auto = fn.length < 2;\n return readable(initial_value, (set) => {\n let inited = false;\n const values = [];\n let pending = 0;\n let cleanup = noop;\n const sync = () => {\n if (pending) {\n return;\n }\n cleanup();\n const result = fn(single ? values[0] : values, set);\n if (auto) {\n set(result);\n }\n else {\n cleanup = is_function(result) ? result : noop;\n }\n };\n const unsubscribers = stores_array.map((store, i) => subscribe(store, (value) => {\n values[i] = value;\n pending &= ~(1 << i);\n if (inited) {\n sync();\n }\n }, () => {\n pending |= (1 << i);\n }));\n inited = true;\n sync();\n return function stop() {\n run_all(unsubscribers);\n cleanup();\n };\n });\n}\n\nexport { derived, readable, writable };\n"],
"mappings": ";;;;;;;;;AAGA,IAAM,mBAAmB,CAAC;AAM1B,SAAS,SAAS,OAAO,OAAO;AAC5B,SAAO;AAAA,IACH,WAAW,SAAS,OAAO,KAAK,EAAE;AAAA,EACtC;AACJ;AAMA,SAAS,SAAS,OAAO,QAAQ,MAAM;AACnC,MAAI;AACJ,QAAM,cAAc,oBAAI,IAAI;AAC5B,WAAS,IAAI,WAAW;AACpB,QAAI,eAAe,OAAO,SAAS,GAAG;AAClC,cAAQ;AACR,UAAI,MAAM;AACN,cAAM,YAAY,CAAC,iBAAiB;AACpC,mBAAW,cAAc,aAAa;AAClC,qBAAW,GAAG;AACd,2BAAiB,KAAK,YAAY,KAAK;AAAA,QAC3C;AACA,YAAI,WAAW;AACX,mBAAS,IAAI,GAAG,IAAI,iBAAiB,QAAQ,KAAK,GAAG;AACjD,6BAAiB,GAAG,GAAG,iBAAiB,IAAI,EAAE;AAAA,UAClD;AACA,2BAAiB,SAAS;AAAA,QAC9B;AAAA,MACJ;AAAA,IACJ;AAAA,EACJ;AACA,WAAS,OAAO,IAAI;AAChB,QAAI,GAAG,KAAK,CAAC;AAAA,EACjB;AACA,WAASA,WAAU,KAAK,aAAa,MAAM;AACvC,UAAM,aAAa,CAAC,KAAK,UAAU;AACnC,gBAAY,IAAI,UAAU;AAC1B,QAAI,YAAY,SAAS,GAAG;AACxB,aAAO,MAAM,GAAG,KAAK;AAAA,IACzB;AACA,QAAI,KAAK;AACT,WAAO,MAAM;AACT,kBAAY,OAAO,UAAU;AAC7B,UAAI,YAAY,SAAS,GAAG;AACxB,aAAK;AACL,eAAO;AAAA,MACX;AAAA,IACJ;AAAA,EACJ;AACA,SAAO,EAAE,KAAK,QAAQ,WAAAA,WAAU;AACpC;AACA,SAAS,QAAQ,QAAQ,IAAI,eAAe;AACxC,QAAM,SAAS,CAAC,MAAM,QAAQ,MAAM;AACpC,QAAM,eAAe,SACf,CAAC,MAAM,IACP;AACN,QAAM,OAAO,GAAG,SAAS;AACzB,SAAO,SAAS,eAAe,CAAC,QAAQ;AACpC,QAAI,SAAS;AACb,UAAM,SAAS,CAAC;AAChB,QAAI,UAAU;AACd,QAAI,UAAU;AACd,UAAM,OAAO,MAAM;AACf,UAAI,SAAS;AACT;AAAA,MACJ;AACA,cAAQ;AACR,YAAM,SAAS,GAAG,SAAS,OAAO,KAAK,QAAQ,GAAG;AAClD,UAAI,MAAM;AACN,YAAI,MAAM;AAAA,MACd,OACK;AACD,kBAAU,YAAY,MAAM,IAAI,SAAS;AAAA,MAC7C;AAAA,IACJ;AACA,UAAM,gBAAgB,aAAa,IAAI,CAAC,OAAO,MAAM,UAAU,OAAO,CAAC,UAAU;AAC7E,aAAO,KAAK;AACZ,iBAAW,EAAE,KAAK;AAClB,UAAI,QAAQ;AACR,aAAK;AAAA,MACT;AAAA,IACJ,GAAG,MAAM;AACL,iBAAY,KAAK;AAAA,IACrB,CAAC,CAAC;AACF,aAAS;AACT,SAAK;AACL,WAAO,SAAS,OAAO;AACnB,cAAQ,aAAa;AACrB,cAAQ;AAAA,IACZ;AAAA,EACJ,CAAC;AACL;",
"names": ["subscribe"]
}

File diff suppressed because it is too large

File diff suppressed because one or more lines are too long

View File

@@ -1 +0,0 @@
{"type":"module"}

node_modules/.vite/deps/svelte.js generated vendored (29 lines changed)

@@ -1,29 +0,0 @@
import {
SvelteComponentDev,
SvelteComponentTyped,
afterUpdate,
beforeUpdate,
createEventDispatcher,
getAllContexts,
getContext,
hasContext,
onDestroy,
onMount,
setContext,
tick
} from "./chunk-ZQ7TCJTX.js";
export {
SvelteComponentDev as SvelteComponent,
SvelteComponentTyped,
afterUpdate,
beforeUpdate,
createEventDispatcher,
getAllContexts,
getContext,
hasContext,
onDestroy,
onMount,
setContext,
tick
};
//# sourceMappingURL=svelte.js.map

View File

@@ -1,7 +0,0 @@
{
"version": 3,
"sources": [],
"sourcesContent": [],
"mappings": "",
"names": []
}

View File

@@ -1,32 +0,0 @@
import {
cubicOut
} from "./chunk-RX5AHRTA.js";
import {
is_function
} from "./chunk-ZQ7TCJTX.js";
// node_modules/svelte/animate/index.mjs
function flip(node, { from, to }, params = {}) {
const style = getComputedStyle(node);
const transform = style.transform === "none" ? "" : style.transform;
const [ox, oy] = style.transformOrigin.split(" ").map(parseFloat);
const dx = from.left + from.width * ox / to.width - (to.left + ox);
const dy = from.top + from.height * oy / to.height - (to.top + oy);
const { delay = 0, duration = (d) => Math.sqrt(d) * 120, easing = cubicOut } = params;
return {
delay,
duration: is_function(duration) ? duration(Math.sqrt(dx * dx + dy * dy)) : duration,
easing,
css: (t, u) => {
const x = u * dx;
const y = u * dy;
const sx = t + u * from.width / to.width;
const sy = t + u * from.height / to.height;
return `transform: ${transform} translate(${x}px, ${y}px) scale(${sx}, ${sy});`;
}
};
}
export {
flip
};
//# sourceMappingURL=svelte_animate.js.map

View File

@@ -1,7 +0,0 @@
{
"version": 3,
"sources": ["../../svelte/animate/index.mjs"],
"sourcesContent": ["import { cubicOut } from '../easing/index.mjs';\nimport { is_function } from '../internal/index.mjs';\n\nfunction flip(node, { from, to }, params = {}) {\n const style = getComputedStyle(node);\n const transform = style.transform === 'none' ? '' : style.transform;\n const [ox, oy] = style.transformOrigin.split(' ').map(parseFloat);\n const dx = (from.left + from.width * ox / to.width) - (to.left + ox);\n const dy = (from.top + from.height * oy / to.height) - (to.top + oy);\n const { delay = 0, duration = (d) => Math.sqrt(d) * 120, easing = cubicOut } = params;\n return {\n delay,\n duration: is_function(duration) ? duration(Math.sqrt(dx * dx + dy * dy)) : duration,\n easing,\n css: (t, u) => {\n const x = u * dx;\n const y = u * dy;\n const sx = t + u * from.width / to.width;\n const sy = t + u * from.height / to.height;\n return `transform: ${transform} translate(${x}px, ${y}px) scale(${sx}, ${sy});`;\n }\n };\n}\n\nexport { flip };\n"],
"mappings": ";;;;;;;;AAGA,SAAS,KAAK,MAAM,EAAE,MAAM,GAAG,GAAG,SAAS,CAAC,GAAG;AAC3C,QAAM,QAAQ,iBAAiB,IAAI;AACnC,QAAM,YAAY,MAAM,cAAc,SAAS,KAAK,MAAM;AAC1D,QAAM,CAAC,IAAI,EAAE,IAAI,MAAM,gBAAgB,MAAM,GAAG,EAAE,IAAI,UAAU;AAChE,QAAM,KAAM,KAAK,OAAO,KAAK,QAAQ,KAAK,GAAG,SAAU,GAAG,OAAO;AACjE,QAAM,KAAM,KAAK,MAAM,KAAK,SAAS,KAAK,GAAG,UAAW,GAAG,MAAM;AACjE,QAAM,EAAE,QAAQ,GAAG,WAAW,CAAC,MAAM,KAAK,KAAK,CAAC,IAAI,KAAK,SAAS,SAAS,IAAI;AAC/E,SAAO;AAAA,IACH;AAAA,IACA,UAAU,YAAY,QAAQ,IAAI,SAAS,KAAK,KAAK,KAAK,KAAK,KAAK,EAAE,CAAC,IAAI;AAAA,IAC3E;AAAA,IACA,KAAK,CAAC,GAAG,MAAM;AACX,YAAM,IAAI,IAAI;AACd,YAAM,IAAI,IAAI;AACd,YAAM,KAAK,IAAI,IAAI,KAAK,QAAQ,GAAG;AACnC,YAAM,KAAK,IAAI,IAAI,KAAK,SAAS,GAAG;AACpC,aAAO,cAAc,uBAAuB,QAAQ,cAAc,OAAO;AAAA,IAC7E;AAAA,EACJ;AACJ;",
"names": []
}

View File

@@ -1,69 +0,0 @@
import {
backIn,
backInOut,
backOut,
bounceIn,
bounceInOut,
bounceOut,
circIn,
circInOut,
circOut,
cubicIn,
cubicInOut,
cubicOut,
elasticIn,
elasticInOut,
elasticOut,
expoIn,
expoInOut,
expoOut,
quadIn,
quadInOut,
quadOut,
quartIn,
quartInOut,
quartOut,
quintIn,
quintInOut,
quintOut,
sineIn,
sineInOut,
sineOut
} from "./chunk-RX5AHRTA.js";
import {
identity
} from "./chunk-ZQ7TCJTX.js";
export {
backIn,
backInOut,
backOut,
bounceIn,
bounceInOut,
bounceOut,
circIn,
circInOut,
circOut,
cubicIn,
cubicInOut,
cubicOut,
elasticIn,
elasticInOut,
elasticOut,
expoIn,
expoInOut,
expoOut,
identity as linear,
quadIn,
quadInOut,
quadOut,
quartIn,
quartInOut,
quartOut,
quintIn,
quintInOut,
quintOut,
sineIn,
sineInOut,
sineOut
};
//# sourceMappingURL=svelte_easing.js.map

View File

@@ -1,7 +0,0 @@
{
"version": 3,
"sources": [],
"sourcesContent": [],
"mappings": "",
"names": []
}

View File

@@ -1,359 +0,0 @@
import {
HtmlTag,
HtmlTagHydration,
SvelteComponent,
SvelteComponentDev,
SvelteComponentTyped,
SvelteElement,
action_destroyer,
add_attribute,
add_classes,
add_flush_callback,
add_location,
add_render_callback,
add_resize_listener,
add_styles,
add_transform,
afterUpdate,
append,
append_dev,
append_empty_stylesheet,
append_hydration,
append_hydration_dev,
append_styles,
assign,
attr,
attr_dev,
attribute_to_object,
beforeUpdate,
bind,
binding_callbacks,
blank_object,
bubble,
check_outros,
children,
claim_component,
claim_element,
claim_html_tag,
claim_space,
claim_svg_element,
claim_text,
clear_loops,
component_subscribe,
compute_rest_props,
compute_slots,
createEventDispatcher,
create_animation,
create_bidirectional_transition,
create_component,
create_in_transition,
create_out_transition,
create_slot,
create_ssr_component,
current_component,
custom_event,
dataset_dev,
debug,
destroy_block,
destroy_component,
destroy_each,
detach,
detach_after_dev,
detach_before_dev,
detach_between_dev,
detach_dev,
dirty_components,
dispatch_dev,
each,
element,
element_is,
empty,
end_hydrating,
escape,
escape_attribute_value,
escape_object,
exclude_internal_props,
fix_and_destroy_block,
fix_and_outro_and_destroy_block,
fix_position,
flush,
getAllContexts,
getContext,
get_all_dirty_from_scope,
get_binding_group_value,
get_current_component,
get_custom_elements_slots,
get_root_for_style,
get_slot_changes,
get_spread_object,
get_spread_update,
get_store_value,
globals,
group_outros,
handle_promise,
hasContext,
has_prop,
identity,
init,
insert,
insert_dev,
insert_hydration,
insert_hydration_dev,
intros,
invalid_attribute_name_character,
is_client,
is_crossorigin,
is_empty,
is_function,
is_promise,
is_void,
listen,
listen_dev,
loop,
loop_guard,
merge_ssr_styles,
missing_component,
mount_component,
noop,
not_equal,
now,
null_to_empty,
object_without_properties,
onDestroy,
onMount,
once,
outro_and_destroy_block,
prevent_default,
prop_dev,
query_selector_all,
raf,
run,
run_all,
safe_not_equal,
schedule_update,
select_multiple_value,
select_option,
select_options,
select_value,
self,
setContext,
set_attributes,
set_current_component,
set_custom_element_data,
set_data,
set_data_dev,
set_input_type,
set_input_value,
set_now,
set_raf,
set_store_value,
set_style,
set_svg_attributes,
space,
spread,
src_url_equal,
start_hydrating,
stop_propagation,
subscribe,
svg_element,
text,
tick,
time_ranges_to_array,
to_number,
toggle_class,
transition_in,
transition_out,
trusted,
update_await_block_branch,
update_keyed_each,
update_slot,
update_slot_base,
validate_component,
validate_dynamic_element,
validate_each_argument,
validate_each_keys,
validate_slots,
validate_store,
validate_void_dynamic_element,
xlink_attr
} from "./chunk-ZQ7TCJTX.js";
export {
HtmlTag,
HtmlTagHydration,
SvelteComponent,
SvelteComponentDev,
SvelteComponentTyped,
SvelteElement,
action_destroyer,
add_attribute,
add_classes,
add_flush_callback,
add_location,
add_render_callback,
add_resize_listener,
add_styles,
add_transform,
afterUpdate,
append,
append_dev,
append_empty_stylesheet,
append_hydration,
append_hydration_dev,
append_styles,
assign,
attr,
attr_dev,
attribute_to_object,
beforeUpdate,
bind,
binding_callbacks,
blank_object,
bubble,
check_outros,
children,
claim_component,
claim_element,
claim_html_tag,
claim_space,
claim_svg_element,
claim_text,
clear_loops,
component_subscribe,
compute_rest_props,
compute_slots,
createEventDispatcher,
create_animation,
create_bidirectional_transition,
create_component,
create_in_transition,
create_out_transition,
create_slot,
create_ssr_component,
current_component,
custom_event,
dataset_dev,
debug,
destroy_block,
destroy_component,
destroy_each,
detach,
detach_after_dev,
detach_before_dev,
detach_between_dev,
detach_dev,
dirty_components,
dispatch_dev,
each,
element,
element_is,
empty,
end_hydrating,
escape,
escape_attribute_value,
escape_object,
exclude_internal_props,
fix_and_destroy_block,
fix_and_outro_and_destroy_block,
fix_position,
flush,
getAllContexts,
getContext,
get_all_dirty_from_scope,
get_binding_group_value,
get_current_component,
get_custom_elements_slots,
get_root_for_style,
get_slot_changes,
get_spread_object,
get_spread_update,
get_store_value,
globals,
group_outros,
handle_promise,
hasContext,
has_prop,
identity,
init,
insert,
insert_dev,
insert_hydration,
insert_hydration_dev,
intros,
invalid_attribute_name_character,
is_client,
is_crossorigin,
is_empty,
is_function,
is_promise,
is_void,
listen,
listen_dev,
loop,
loop_guard,
merge_ssr_styles,
missing_component,
mount_component,
noop,
not_equal,
now,
null_to_empty,
object_without_properties,
onDestroy,
onMount,
once,
outro_and_destroy_block,
prevent_default,
prop_dev,
query_selector_all,
raf,
run,
run_all,
safe_not_equal,
schedule_update,
select_multiple_value,
select_option,
select_options,
select_value,
self,
setContext,
set_attributes,
set_current_component,
set_custom_element_data,
set_data,
set_data_dev,
set_input_type,
set_input_value,
set_now,
set_raf,
set_store_value,
set_style,
set_svg_attributes,
space,
spread,
src_url_equal,
start_hydrating,
stop_propagation,
subscribe,
svg_element,
text,
tick,
time_ranges_to_array,
to_number,
toggle_class,
transition_in,
transition_out,
trusted,
update_await_block_branch,
update_keyed_each,
update_slot,
update_slot_base,
validate_component,
validate_dynamic_element,
validate_each_argument,
validate_each_keys,
validate_slots,
validate_store,
validate_void_dynamic_element,
xlink_attr
};
//# sourceMappingURL=svelte_internal.js.map

View File

@@ -1,7 +0,0 @@
{
"version": 3,
"sources": [],
"sourcesContent": [],
"mappings": "",
"names": []
}

View File

@@ -1,207 +0,0 @@
import {
writable
} from "./chunk-Y5HQ3MVM.js";
import "./chunk-RX5AHRTA.js";
import {
assign,
identity,
loop,
now
} from "./chunk-ZQ7TCJTX.js";
// node_modules/svelte/motion/index.mjs
function is_date(obj) {
return Object.prototype.toString.call(obj) === "[object Date]";
}
function tick_spring(ctx, last_value, current_value, target_value) {
if (typeof current_value === "number" || is_date(current_value)) {
const delta = target_value - current_value;
const velocity = (current_value - last_value) / (ctx.dt || 1 / 60);
const spring2 = ctx.opts.stiffness * delta;
const damper = ctx.opts.damping * velocity;
const acceleration = (spring2 - damper) * ctx.inv_mass;
const d = (velocity + acceleration) * ctx.dt;
if (Math.abs(d) < ctx.opts.precision && Math.abs(delta) < ctx.opts.precision) {
return target_value;
} else {
ctx.settled = false;
return is_date(current_value) ? new Date(current_value.getTime() + d) : current_value + d;
}
} else if (Array.isArray(current_value)) {
return current_value.map((_, i) => tick_spring(ctx, last_value[i], current_value[i], target_value[i]));
} else if (typeof current_value === "object") {
const next_value = {};
for (const k in current_value) {
next_value[k] = tick_spring(ctx, last_value[k], current_value[k], target_value[k]);
}
return next_value;
} else {
throw new Error(`Cannot spring ${typeof current_value} values`);
}
}
function spring(value, opts = {}) {
const store = writable(value);
const { stiffness = 0.15, damping = 0.8, precision = 0.01 } = opts;
let last_time;
let task;
let current_token;
let last_value = value;
let target_value = value;
let inv_mass = 1;
let inv_mass_recovery_rate = 0;
let cancel_task = false;
function set(new_value, opts2 = {}) {
target_value = new_value;
const token = current_token = {};
if (value == null || opts2.hard || spring2.stiffness >= 1 && spring2.damping >= 1) {
cancel_task = true;
last_time = now();
last_value = new_value;
store.set(value = target_value);
return Promise.resolve();
} else if (opts2.soft) {
const rate = opts2.soft === true ? 0.5 : +opts2.soft;
inv_mass_recovery_rate = 1 / (rate * 60);
inv_mass = 0;
}
if (!task) {
last_time = now();
cancel_task = false;
task = loop((now2) => {
if (cancel_task) {
cancel_task = false;
task = null;
return false;
}
inv_mass = Math.min(inv_mass + inv_mass_recovery_rate, 1);
const ctx = {
inv_mass,
opts: spring2,
settled: true,
dt: (now2 - last_time) * 60 / 1e3
};
const next_value = tick_spring(ctx, last_value, value, target_value);
last_time = now2;
last_value = value;
store.set(value = next_value);
if (ctx.settled) {
task = null;
}
return !ctx.settled;
});
}
return new Promise((fulfil) => {
task.promise.then(() => {
if (token === current_token)
fulfil();
});
});
}
const spring2 = {
set,
update: (fn, opts2) => set(fn(target_value, value), opts2),
subscribe: store.subscribe,
stiffness,
damping,
precision
};
return spring2;
}
function get_interpolator(a, b) {
if (a === b || a !== a)
return () => a;
const type = typeof a;
if (type !== typeof b || Array.isArray(a) !== Array.isArray(b)) {
throw new Error("Cannot interpolate values of different type");
}
if (Array.isArray(a)) {
const arr = b.map((bi, i) => {
return get_interpolator(a[i], bi);
});
return (t) => arr.map((fn) => fn(t));
}
if (type === "object") {
if (!a || !b)
throw new Error("Object cannot be null");
if (is_date(a) && is_date(b)) {
a = a.getTime();
b = b.getTime();
const delta = b - a;
return (t) => new Date(a + t * delta);
}
const keys = Object.keys(b);
const interpolators = {};
keys.forEach((key) => {
interpolators[key] = get_interpolator(a[key], b[key]);
});
return (t) => {
const result = {};
keys.forEach((key) => {
result[key] = interpolators[key](t);
});
return result;
};
}
if (type === "number") {
const delta = b - a;
return (t) => a + t * delta;
}
throw new Error(`Cannot interpolate ${type} values`);
}
function tweened(value, defaults = {}) {
const store = writable(value);
let task;
let target_value = value;
function set(new_value, opts) {
if (value == null) {
store.set(value = new_value);
return Promise.resolve();
}
target_value = new_value;
let previous_task = task;
let started = false;
let { delay = 0, duration = 400, easing = identity, interpolate = get_interpolator } = assign(assign({}, defaults), opts);
if (duration === 0) {
if (previous_task) {
previous_task.abort();
previous_task = null;
}
store.set(value = target_value);
return Promise.resolve();
}
const start = now() + delay;
let fn;
task = loop((now2) => {
if (now2 < start)
return true;
if (!started) {
fn = interpolate(value, new_value);
if (typeof duration === "function")
duration = duration(value, new_value);
started = true;
}
if (previous_task) {
previous_task.abort();
previous_task = null;
}
const elapsed = now2 - start;
if (elapsed > duration) {
store.set(value = new_value);
return false;
}
store.set(value = fn(easing(elapsed / duration)));
return true;
});
return task.promise;
}
return {
set,
update: (fn, opts) => set(fn(target_value, value), opts),
subscribe: store.subscribe
};
}
export {
spring,
tweened
};
//# sourceMappingURL=svelte_motion.js.map

File diff suppressed because one or more lines are too long

View File

@@ -1,15 +0,0 @@
import {
derived,
readable,
writable
} from "./chunk-Y5HQ3MVM.js";
import {
get_store_value
} from "./chunk-ZQ7TCJTX.js";
export {
derived,
get_store_value as get,
readable,
writable
};
//# sourceMappingURL=svelte_store.js.map

View File

@@ -1,7 +0,0 @@
{
"version": 3,
"sources": [],
"sourcesContent": [],
"mappings": "",
"names": []
}

View File

@@ -1,184 +0,0 @@
import {
cubicInOut,
cubicOut
} from "./chunk-RX5AHRTA.js";
import {
assign,
identity,
is_function
} from "./chunk-ZQ7TCJTX.js";
// node_modules/svelte/transition/index.mjs
function __rest(s, e) {
var t = {};
for (var p in s)
if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)
t[p] = s[p];
if (s != null && typeof Object.getOwnPropertySymbols === "function")
for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {
if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))
t[p[i]] = s[p[i]];
}
return t;
}
function blur(node, { delay = 0, duration = 400, easing = cubicInOut, amount = 5, opacity = 0 } = {}) {
const style = getComputedStyle(node);
const target_opacity = +style.opacity;
const f = style.filter === "none" ? "" : style.filter;
const od = target_opacity * (1 - opacity);
return {
delay,
duration,
easing,
css: (_t, u) => `opacity: ${target_opacity - od * u}; filter: ${f} blur(${u * amount}px);`
};
}
function fade(node, { delay = 0, duration = 400, easing = identity } = {}) {
const o = +getComputedStyle(node).opacity;
return {
delay,
duration,
easing,
css: (t) => `opacity: ${t * o}`
};
}
function fly(node, { delay = 0, duration = 400, easing = cubicOut, x = 0, y = 0, opacity = 0 } = {}) {
const style = getComputedStyle(node);
const target_opacity = +style.opacity;
const transform = style.transform === "none" ? "" : style.transform;
const od = target_opacity * (1 - opacity);
return {
delay,
duration,
easing,
css: (t, u) => `
transform: ${transform} translate(${(1 - t) * x}px, ${(1 - t) * y}px);
opacity: ${target_opacity - od * u}`
};
}
function slide(node, { delay = 0, duration = 400, easing = cubicOut } = {}) {
const style = getComputedStyle(node);
const opacity = +style.opacity;
const height = parseFloat(style.height);
const padding_top = parseFloat(style.paddingTop);
const padding_bottom = parseFloat(style.paddingBottom);
const margin_top = parseFloat(style.marginTop);
const margin_bottom = parseFloat(style.marginBottom);
const border_top_width = parseFloat(style.borderTopWidth);
const border_bottom_width = parseFloat(style.borderBottomWidth);
return {
delay,
duration,
easing,
css: (t) => `overflow: hidden;opacity: ${Math.min(t * 20, 1) * opacity};height: ${t * height}px;padding-top: ${t * padding_top}px;padding-bottom: ${t * padding_bottom}px;margin-top: ${t * margin_top}px;margin-bottom: ${t * margin_bottom}px;border-top-width: ${t * border_top_width}px;border-bottom-width: ${t * border_bottom_width}px;`
};
}
function scale(node, { delay = 0, duration = 400, easing = cubicOut, start = 0, opacity = 0 } = {}) {
const style = getComputedStyle(node);
const target_opacity = +style.opacity;
const transform = style.transform === "none" ? "" : style.transform;
const sd = 1 - start;
const od = target_opacity * (1 - opacity);
return {
delay,
duration,
easing,
css: (_t, u) => `
transform: ${transform} scale(${1 - sd * u});
opacity: ${target_opacity - od * u}
`
};
}
function draw(node, { delay = 0, speed, duration, easing = cubicInOut } = {}) {
let len = node.getTotalLength();
const style = getComputedStyle(node);
if (style.strokeLinecap !== "butt") {
len += parseInt(style.strokeWidth);
}
if (duration === void 0) {
if (speed === void 0) {
duration = 800;
} else {
duration = len / speed;
}
} else if (typeof duration === "function") {
duration = duration(len);
}
return {
delay,
duration,
easing,
css: (t, u) => `stroke-dasharray: ${t * len} ${u * len}`
};
}
function crossfade(_a) {
var { fallback } = _a, defaults = __rest(_a, ["fallback"]);
const to_receive = /* @__PURE__ */ new Map();
const to_send = /* @__PURE__ */ new Map();
function crossfade2(from, node, params) {
const { delay = 0, duration = (d2) => Math.sqrt(d2) * 30, easing = cubicOut } = assign(assign({}, defaults), params);
const to = node.getBoundingClientRect();
const dx = from.left - to.left;
const dy = from.top - to.top;
const dw = from.width / to.width;
const dh = from.height / to.height;
const d = Math.sqrt(dx * dx + dy * dy);
const style = getComputedStyle(node);
const transform = style.transform === "none" ? "" : style.transform;
const opacity = +style.opacity;
return {
delay,
duration: is_function(duration) ? duration(d) : duration,
easing,
css: (t, u) => `
opacity: ${t * opacity};
transform-origin: top left;
transform: ${transform} translate(${u * dx}px,${u * dy}px) scale(${t + (1 - t) * dw}, ${t + (1 - t) * dh});
`
};
}
function transition(items, counterparts, intro) {
return (node, params) => {
items.set(params.key, {
rect: node.getBoundingClientRect()
});
return () => {
if (counterparts.has(params.key)) {
const { rect } = counterparts.get(params.key);
counterparts.delete(params.key);
return crossfade2(rect, node, params);
}
items.delete(params.key);
return fallback && fallback(node, params, intro);
};
};
}
return [
transition(to_send, to_receive, false),
transition(to_receive, to_send, true)
];
}
export {
blur,
crossfade,
draw,
fade,
fly,
scale,
slide
};
/*! *****************************************************************************
Copyright (c) Microsoft Corporation.
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
PERFORMANCE OF THIS SOFTWARE.
***************************************************************************** */
//# sourceMappingURL=svelte_transition.js.map

File diff suppressed because one or more lines are too long

View File

@@ -1,19 +0,0 @@
Copyright 2019 Justin Ridgewell <jridgewell@google.com>

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@@ -1,40 +0,0 @@
# @jridgewell/resolve-uri

> Resolve a URI relative to an optional base URI

Resolve any combination of absolute URIs, protocol-relative URIs, absolute paths, or relative paths.

## Installation

```sh
npm install @jridgewell/resolve-uri
```

## Usage

```typescript
function resolve(input: string, base?: string): string;
```

```js
import resolve from '@jridgewell/resolve-uri';
resolve('foo', 'https://example.com'); // => 'https://example.com/foo'
```
| Input | Base | Resolution | Explanation |
|-----------------------|-------------------------|--------------------------------|--------------------------------------------------------------|
| `https://example.com` | _any_ | `https://example.com/` | Input is normalized only |
| `//example.com` | `https://base.com/` | `https://example.com/` | Input inherits the base's protocol |
| `//example.com` | _rest_ | `//example.com/` | Input is normalized only |
| `/example` | `https://base.com/` | `https://base.com/example` | Input inherits the base's origin |
| `/example` | `//base.com/` | `//base.com/example` | Input inherits the base's host and remains protocol relative |
| `/example` | _rest_ | `/example` | Input is normalized only |
| `example` | `https://base.com/dir/` | `https://base.com/dir/example` | Input is joined with the base |
| `example` | `https://base.com/file` | `https://base.com/example` | Input is joined with the base without its file |
| `example` | `//base.com/dir/` | `//base.com/dir/example` | Input is joined with the base's last directory |
| `example` | `//base.com/file` | `//base.com/example` | Input is joined with the base without its file |
| `example` | `/base/dir/` | `/base/dir/example` | Input is joined with the base's last directory |
| `example` | `/base/file` | `/base/example` | Input is joined with the base without its file |
| `example` | `base/dir/` | `base/dir/example` | Input is joined with the base's last directory |
| `example` | `base/file` | `base/example` | Input is joined with the base without its file |
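As a quick illustration of the table above, a few of those input/base pairs can be exercised directly with the package's default export. This is a minimal sketch: the expected values are taken straight from the table, and `node:assert` is only used here for illustration (it is not part of this package).

```js
import assert from 'node:assert';
import resolve from '@jridgewell/resolve-uri';

// Absolute-path input inherits the base's origin.
assert.strictEqual(resolve('/example', 'https://base.com/'), 'https://base.com/example');

// Relative input is joined with the base's last directory...
assert.strictEqual(resolve('example', 'https://base.com/dir/'), 'https://base.com/dir/example');

// ...and replaces the base's trailing file component.
assert.strictEqual(resolve('example', 'https://base.com/file'), 'https://base.com/example');

// Protocol-relative input inherits the base's protocol.
assert.strictEqual(resolve('//example.com', 'https://base.com/'), 'https://example.com/');
```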

View File

@@ -1,242 +0,0 @@
// Matches the scheme of a URL, eg "http://"
const schemeRegex = /^[\w+.-]+:\/\//;
/**
* Matches the parts of a URL:
* 1. Scheme, including ":", guaranteed.
* 2. User/password, including "@", optional.
* 3. Host, guaranteed.
* 4. Port, including ":", optional.
* 5. Path, including "/", optional.
* 6. Query, including "?", optional.
* 7. Hash, including "#", optional.
*/
const urlRegex = /^([\w+.-]+:)\/\/([^@/#?]*@)?([^:/#?]*)(:\d+)?(\/[^#?]*)?(\?[^#]*)?(#.*)?/;
/**
* File URLs are weird. They don't need the regular `//` in the scheme, they may or may not start
* with a leading `/`, they can have a domain (but only if they don't start with a Windows drive).
*
* 1. Host, optional.
* 2. Path, which may include "/", guaranteed.
* 3. Query, including "?", optional.
* 4. Hash, including "#", optional.
*/
const fileRegex = /^file:(?:\/\/((?![a-z]:)[^/#?]*)?)?(\/?[^#?]*)(\?[^#]*)?(#.*)?/i;
var UrlType;
(function (UrlType) {
UrlType[UrlType["Empty"] = 1] = "Empty";
UrlType[UrlType["Hash"] = 2] = "Hash";
UrlType[UrlType["Query"] = 3] = "Query";
UrlType[UrlType["RelativePath"] = 4] = "RelativePath";
UrlType[UrlType["AbsolutePath"] = 5] = "AbsolutePath";
UrlType[UrlType["SchemeRelative"] = 6] = "SchemeRelative";
UrlType[UrlType["Absolute"] = 7] = "Absolute";
})(UrlType || (UrlType = {}));
function isAbsoluteUrl(input) {
return schemeRegex.test(input);
}
function isSchemeRelativeUrl(input) {
return input.startsWith('//');
}
function isAbsolutePath(input) {
return input.startsWith('/');
}
function isFileUrl(input) {
return input.startsWith('file:');
}
function isRelative(input) {
return /^[.?#]/.test(input);
}
function parseAbsoluteUrl(input) {
const match = urlRegex.exec(input);
return makeUrl(match[1], match[2] || '', match[3], match[4] || '', match[5] || '/', match[6] || '', match[7] || '');
}
function parseFileUrl(input) {
const match = fileRegex.exec(input);
const path = match[2];
return makeUrl('file:', '', match[1] || '', '', isAbsolutePath(path) ? path : '/' + path, match[3] || '', match[4] || '');
}
function makeUrl(scheme, user, host, port, path, query, hash) {
return {
scheme,
user,
host,
port,
path,
query,
hash,
type: UrlType.Absolute,
};
}
function parseUrl(input) {
if (isSchemeRelativeUrl(input)) {
const url = parseAbsoluteUrl('http:' + input);
url.scheme = '';
url.type = UrlType.SchemeRelative;
return url;
}
if (isAbsolutePath(input)) {
const url = parseAbsoluteUrl('http://foo.com' + input);
url.scheme = '';
url.host = '';
url.type = UrlType.AbsolutePath;
return url;
}
if (isFileUrl(input))
return parseFileUrl(input);
if (isAbsoluteUrl(input))
return parseAbsoluteUrl(input);
const url = parseAbsoluteUrl('http://foo.com/' + input);
url.scheme = '';
url.host = '';
url.type = input
? input.startsWith('?')
? UrlType.Query
: input.startsWith('#')
? UrlType.Hash
: UrlType.RelativePath
: UrlType.Empty;
return url;
}
function stripPathFilename(path) {
// If a path ends with a parent directory "..", then it's a relative path with excess parent
// paths. It's not a file, so we can't strip it.
if (path.endsWith('/..'))
return path;
const index = path.lastIndexOf('/');
return path.slice(0, index + 1);
}
function mergePaths(url, base) {
normalizePath(base, base.type);
// If the path is just a "/", then it was an empty path to begin with (remember, we're a relative
// path).
if (url.path === '/') {
url.path = base.path;
}
else {
// Resolution happens relative to the base path's directory, not the file.
url.path = stripPathFilename(base.path) + url.path;
}
}
/**
* The path can have empty directories "//", unneeded parents "foo/..", or current directory
* "foo/.". We need to normalize to a standard representation.
*/
function normalizePath(url, type) {
const rel = type <= UrlType.RelativePath;
const pieces = url.path.split('/');
// We need to preserve the first piece always, so that we output a leading slash. The item at
// pieces[0] is an empty string.
let pointer = 1;
// Positive is the number of real directories we've output, used for popping a parent directory.
// Eg, "foo/bar/.." will have a positive 2, and we can decrement to be left with just "foo".
let positive = 0;
// We need to keep a trailing slash if we encounter an empty directory (eg, splitting "foo/" will
// generate `["foo", ""]` pieces). And, if we pop a parent directory. But once we encounter a
// real directory, we won't need to append, unless the other conditions happen again.
let addTrailingSlash = false;
for (let i = 1; i < pieces.length; i++) {
const piece = pieces[i];
// An empty directory, could be a trailing slash, or just a double "//" in the path.
if (!piece) {
addTrailingSlash = true;
continue;
}
// If we encounter a real directory, then we don't need to append anymore.
addTrailingSlash = false;
// A current directory, which we can always drop.
if (piece === '.')
continue;
// A parent directory, we need to see if there are any real directories we can pop. Else, we
// have an excess of parents, and we'll need to keep the "..".
if (piece === '..') {
if (positive) {
addTrailingSlash = true;
positive--;
pointer--;
}
else if (rel) {
// If we're in a relativePath, then we need to keep the excess parents. Else, in an absolute
// URL, protocol relative URL, or an absolute path, we don't need to keep excess.
pieces[pointer++] = piece;
}
continue;
}
// We've encountered a real directory. Move it to the next insertion pointer, which accounts for
// any popped or dropped directories.
pieces[pointer++] = piece;
positive++;
}
let path = '';
for (let i = 1; i < pointer; i++) {
path += '/' + pieces[i];
}
if (!path || (addTrailingSlash && !path.endsWith('/..'))) {
path += '/';
}
url.path = path;
}
/**
* Attempts to resolve `input` URL/path relative to `base`.
*/
function resolve(input, base) {
if (!input && !base)
return '';
const url = parseUrl(input);
let inputType = url.type;
if (base && inputType !== UrlType.Absolute) {
const baseUrl = parseUrl(base);
const baseType = baseUrl.type;
switch (inputType) {
case UrlType.Empty:
url.hash = baseUrl.hash;
// fall through
case UrlType.Hash:
url.query = baseUrl.query;
// fall through
case UrlType.Query:
case UrlType.RelativePath:
mergePaths(url, baseUrl);
// fall through
case UrlType.AbsolutePath:
// The host, user, and port are joined, you can't copy one without the others.
url.user = baseUrl.user;
url.host = baseUrl.host;
url.port = baseUrl.port;
// fall through
case UrlType.SchemeRelative:
// The input doesn't have a scheme at least, so we need to copy at least that over.
url.scheme = baseUrl.scheme;
}
if (baseType > inputType)
inputType = baseType;
}
normalizePath(url, inputType);
const queryHash = url.query + url.hash;
switch (inputType) {
// This is impossible, because of the empty checks at the start of the function.
// case UrlType.Empty:
case UrlType.Hash:
case UrlType.Query:
return queryHash;
case UrlType.RelativePath: {
// The first char is always a "/", and we need it to be relative.
const path = url.path.slice(1);
if (!path)
return queryHash || '.';
if (isRelative(base || input) && !isRelative(path)) {
// If base started with a leading ".", or there is no base and input started with a ".",
// then we need to ensure that the relative path starts with a ".". We don't know if
// relative starts with a "..", though, so check before prepending.
return './' + path + queryHash;
}
return path + queryHash;
}
case UrlType.AbsolutePath:
return url.path + queryHash;
default:
return url.scheme + '//' + url.user + url.host + url.port + url.path + queryHash;
}
}
export { resolve as default };
//# sourceMappingURL=resolve-uri.mjs.map

File diff suppressed because one or more lines are too long

View File

@ -1,250 +0,0 @@
(function (global, factory) {
typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory() :
typeof define === 'function' && define.amd ? define(factory) :
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, global.resolveURI = factory());
})(this, (function () { 'use strict';
// Matches the scheme of a URL, eg "http://"
const schemeRegex = /^[\w+.-]+:\/\//;
/**
* Matches the parts of a URL:
* 1. Scheme, including ":", guaranteed.
* 2. User/password, including "@", optional.
* 3. Host, guaranteed.
* 4. Port, including ":", optional.
* 5. Path, including "/", optional.
* 6. Query, including "?", optional.
* 7. Hash, including "#", optional.
*/
const urlRegex = /^([\w+.-]+:)\/\/([^@/#?]*@)?([^:/#?]*)(:\d+)?(\/[^#?]*)?(\?[^#]*)?(#.*)?/;
/**
* File URLs are weird. They don't need the regular `//` in the scheme, they may or may not start
* with a leading `/`, they can have a domain (but only if they don't start with a Windows drive).
*
* 1. Host, optional.
* 2. Path, which may include "/", guaranteed.
* 3. Query, including "?", optional.
* 4. Hash, including "#", optional.
*/
const fileRegex = /^file:(?:\/\/((?![a-z]:)[^/#?]*)?)?(\/?[^#?]*)(\?[^#]*)?(#.*)?/i;
var UrlType;
(function (UrlType) {
UrlType[UrlType["Empty"] = 1] = "Empty";
UrlType[UrlType["Hash"] = 2] = "Hash";
UrlType[UrlType["Query"] = 3] = "Query";
UrlType[UrlType["RelativePath"] = 4] = "RelativePath";
UrlType[UrlType["AbsolutePath"] = 5] = "AbsolutePath";
UrlType[UrlType["SchemeRelative"] = 6] = "SchemeRelative";
UrlType[UrlType["Absolute"] = 7] = "Absolute";
})(UrlType || (UrlType = {}));
function isAbsoluteUrl(input) {
return schemeRegex.test(input);
}
function isSchemeRelativeUrl(input) {
return input.startsWith('//');
}
function isAbsolutePath(input) {
return input.startsWith('/');
}
function isFileUrl(input) {
return input.startsWith('file:');
}
function isRelative(input) {
return /^[.?#]/.test(input);
}
function parseAbsoluteUrl(input) {
const match = urlRegex.exec(input);
return makeUrl(match[1], match[2] || '', match[3], match[4] || '', match[5] || '/', match[6] || '', match[7] || '');
}
function parseFileUrl(input) {
const match = fileRegex.exec(input);
const path = match[2];
return makeUrl('file:', '', match[1] || '', '', isAbsolutePath(path) ? path : '/' + path, match[3] || '', match[4] || '');
}
function makeUrl(scheme, user, host, port, path, query, hash) {
return {
scheme,
user,
host,
port,
path,
query,
hash,
type: UrlType.Absolute,
};
}
function parseUrl(input) {
if (isSchemeRelativeUrl(input)) {
const url = parseAbsoluteUrl('http:' + input);
url.scheme = '';
url.type = UrlType.SchemeRelative;
return url;
}
if (isAbsolutePath(input)) {
const url = parseAbsoluteUrl('http://foo.com' + input);
url.scheme = '';
url.host = '';
url.type = UrlType.AbsolutePath;
return url;
}
if (isFileUrl(input))
return parseFileUrl(input);
if (isAbsoluteUrl(input))
return parseAbsoluteUrl(input);
const url = parseAbsoluteUrl('http://foo.com/' + input);
url.scheme = '';
url.host = '';
url.type = input
? input.startsWith('?')
? UrlType.Query
: input.startsWith('#')
? UrlType.Hash
: UrlType.RelativePath
: UrlType.Empty;
return url;
}
function stripPathFilename(path) {
// If a path ends with a parent directory "..", then it's a relative path with excess parent
// paths. It's not a file, so we can't strip it.
if (path.endsWith('/..'))
return path;
const index = path.lastIndexOf('/');
return path.slice(0, index + 1);
}
function mergePaths(url, base) {
normalizePath(base, base.type);
// If the path is just a "/", then it was an empty path to begin with (remember, we're a relative
// path).
if (url.path === '/') {
url.path = base.path;
}
else {
// Resolution happens relative to the base path's directory, not the file.
url.path = stripPathFilename(base.path) + url.path;
}
}
/**
* The path can have empty directories "//", unneeded parents "foo/..", or current directory
* "foo/.". We need to normalize to a standard representation.
*/
function normalizePath(url, type) {
const rel = type <= UrlType.RelativePath;
const pieces = url.path.split('/');
// We need to preserve the first piece always, so that we output a leading slash. The item at
// pieces[0] is an empty string.
let pointer = 1;
// Positive is the number of real directories we've output, used for popping a parent directory.
// Eg, "foo/bar/.." will have a positive 2, and we can decrement to be left with just "foo".
let positive = 0;
// We need to keep a trailing slash if we encounter an empty directory (eg, splitting "foo/" will
// generate `["foo", ""]` pieces). And, if we pop a parent directory. But once we encounter a
// real directory, we won't need to append, unless the other conditions happen again.
let addTrailingSlash = false;
for (let i = 1; i < pieces.length; i++) {
const piece = pieces[i];
// An empty directory, could be a trailing slash, or just a double "//" in the path.
if (!piece) {
addTrailingSlash = true;
continue;
}
// If we encounter a real directory, then we don't need to append anymore.
addTrailingSlash = false;
// A current directory, which we can always drop.
if (piece === '.')
continue;
// A parent directory, we need to see if there are any real directories we can pop. Else, we
// have an excess of parents, and we'll need to keep the "..".
if (piece === '..') {
if (positive) {
addTrailingSlash = true;
positive--;
pointer--;
}
else if (rel) {
// If we're in a relativePath, then we need to keep the excess parents. Else, in an absolute
// URL, protocol relative URL, or an absolute path, we don't need to keep excess.
pieces[pointer++] = piece;
}
continue;
}
// We've encountered a real directory. Move it to the next insertion pointer, which accounts for
// any popped or dropped directories.
pieces[pointer++] = piece;
positive++;
}
let path = '';
for (let i = 1; i < pointer; i++) {
path += '/' + pieces[i];
}
if (!path || (addTrailingSlash && !path.endsWith('/..'))) {
path += '/';
}
url.path = path;
}
/**
* Attempts to resolve `input` URL/path relative to `base`.
*/
function resolve(input, base) {
if (!input && !base)
return '';
const url = parseUrl(input);
let inputType = url.type;
if (base && inputType !== UrlType.Absolute) {
const baseUrl = parseUrl(base);
const baseType = baseUrl.type;
switch (inputType) {
case UrlType.Empty:
url.hash = baseUrl.hash;
// fall through
case UrlType.Hash:
url.query = baseUrl.query;
// fall through
case UrlType.Query:
case UrlType.RelativePath:
mergePaths(url, baseUrl);
// fall through
case UrlType.AbsolutePath:
// The host, user, and port are joined, you can't copy one without the others.
url.user = baseUrl.user;
url.host = baseUrl.host;
url.port = baseUrl.port;
// fall through
case UrlType.SchemeRelative:
// The input doesn't have a scheme at least, so we need to copy at least that over.
url.scheme = baseUrl.scheme;
}
if (baseType > inputType)
inputType = baseType;
}
normalizePath(url, inputType);
const queryHash = url.query + url.hash;
switch (inputType) {
// This is impossible, because of the empty checks at the start of the function.
// case UrlType.Empty:
case UrlType.Hash:
case UrlType.Query:
return queryHash;
case UrlType.RelativePath: {
// The first char is always a "/", and we need it to be relative.
const path = url.path.slice(1);
if (!path)
return queryHash || '.';
if (isRelative(base || input) && !isRelative(path)) {
// If base started with a leading ".", or there is no base and input started with a ".",
// then we need to ensure that the relative path starts with a ".". We don't know if
// relative starts with a "..", though, so check before prepending.
return './' + path + queryHash;
}
return path + queryHash;
}
case UrlType.AbsolutePath:
return url.path + queryHash;
default:
return url.scheme + '//' + url.user + url.host + url.port + url.path + queryHash;
}
}
return resolve;
}));
//# sourceMappingURL=resolve-uri.umd.js.map

File diff suppressed because one or more lines are too long

View File

@ -1,4 +0,0 @@
/**
* Attempts to resolve `input` URL/path relative to `base`.
*/
export default function resolve(input: string, base: string | undefined): string;

View File

@ -1,69 +0,0 @@
{
"name": "@jridgewell/resolve-uri",
"version": "3.1.0",
"description": "Resolve a URI relative to an optional base URI",
"keywords": [
"resolve",
"uri",
"url",
"path"
],
"author": "Justin Ridgewell <justin@ridgewell.name>",
"license": "MIT",
"repository": "https://github.com/jridgewell/resolve-uri",
"main": "dist/resolve-uri.umd.js",
"module": "dist/resolve-uri.mjs",
"typings": "dist/types/resolve-uri.d.ts",
"exports": {
".": [
{
"types": "./dist/types/resolve-uri.d.ts",
"browser": "./dist/resolve-uri.umd.js",
"require": "./dist/resolve-uri.umd.js",
"import": "./dist/resolve-uri.mjs"
},
"./dist/resolve-uri.umd.js"
],
"./package.json": "./package.json"
},
"files": [
"dist"
],
"engines": {
"node": ">=6.0.0"
},
"scripts": {
"prebuild": "rm -rf dist",
"build": "run-s -n build:*",
"build:rollup": "rollup -c rollup.config.js",
"build:ts": "tsc --project tsconfig.build.json",
"lint": "run-s -n lint:*",
"lint:prettier": "npm run test:lint:prettier -- --write",
"lint:ts": "npm run test:lint:ts -- --fix",
"pretest": "run-s build:rollup",
"test": "run-s -n test:lint test:only",
"test:debug": "mocha --inspect-brk",
"test:lint": "run-s -n test:lint:*",
"test:lint:prettier": "prettier --check '{src,test}/**/*.ts'",
"test:lint:ts": "eslint '{src,test}/**/*.ts'",
"test:only": "mocha",
"test:coverage": "c8 mocha",
"test:watch": "mocha --watch",
"prepublishOnly": "npm run preversion",
"preversion": "run-s test build"
},
"devDependencies": {
"@jridgewell/resolve-uri-latest": "npm:@jridgewell/resolve-uri@*",
"@rollup/plugin-typescript": "8.3.0",
"@typescript-eslint/eslint-plugin": "5.10.0",
"@typescript-eslint/parser": "5.10.0",
"c8": "7.11.0",
"eslint": "8.7.0",
"eslint-config-prettier": "8.3.0",
"mocha": "9.2.0",
"npm-run-all": "4.1.5",
"prettier": "2.5.1",
"rollup": "2.66.0",
"typescript": "4.5.5"
}
}

View File

@ -1,21 +0,0 @@
The MIT License
Copyright (c) 2015 Rich Harris
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

View File

@ -1,200 +0,0 @@
# sourcemap-codec
Encode/decode the `mappings` property of a [sourcemap](https://docs.google.com/document/d/1U1RGAehQwRypUTovF1KRlpiOFze0b-_2gc6fAH0KY0k/edit).
## Why?
Sourcemaps are difficult to generate and manipulate, because the `mappings` property (the part that actually links the generated code back to the original source) is encoded using an obscure method called [Variable-length quantity](https://en.wikipedia.org/wiki/Variable-length_quantity). On top of that, each segment in the mapping contains offsets rather than absolute indices, which means that you can't look at a segment in isolation; you have to understand the whole sourcemap.
This package makes the process slightly easier.
## Installation
```bash
npm install sourcemap-codec
```
## Usage
```js
import { encode, decode } from 'sourcemap-codec';
var decoded = decode( ';EAEEA,EAAE,EAAC,CAAE;ECQY,UACC' );
assert.deepEqual( decoded, [
// the first line (of the generated code) has no mappings,
// as shown by the starting semi-colon (which separates lines)
[],
// the second line contains four (comma-separated) segments
[
// segments are encoded as you'd expect:
// [ generatedCodeColumn, sourceIndex, sourceCodeLine, sourceCodeColumn, nameIndex ]
// i.e. the first segment begins at column 2, and maps back to the second column
// of the second line (both zero-based) of the 0th source, and uses the 0th
// name in the `map.names` array
[ 2, 0, 2, 2, 0 ],
// the remaining segments are 4-length rather than 5-length,
// because they don't map a name
[ 4, 0, 2, 4 ],
[ 6, 0, 2, 5 ],
[ 7, 0, 2, 7 ]
],
// the final line contains two segments
[
[ 2, 1, 10, 19 ],
[ 12, 1, 11, 20 ]
]
]);
var encoded = encode( decoded );
assert.equal( encoded, ';EAEEA,EAAE,EAAC,CAAE;ECQY,UACC' );
```
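To make the point from the Why section concrete (segments encode offsets, not absolute indices), here is a small hand-constructed sketch; the mapping strings below are illustrative values, not taken from the package's docs:
```js
import { decode } from 'sourcemap-codec';

// 'CAAC' always encodes the deltas [+1, +0, +0, +1], but the absolute
// position it decodes to depends on the segment that precedes it.
decode('AAAA,CAAC'); // => [ [ [0, 0, 0, 0], [1, 0, 0, 1] ] ]
decode('EAEE,CAAC'); // => [ [ [2, 0, 2, 2], [3, 0, 2, 3] ] ]
```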
## Benchmarks
```
node v18.0.0
amp.js.map - 45120 segments
Decode Memory Usage:
@jridgewell/sourcemap-codec 5479160 bytes
sourcemap-codec 5659336 bytes
source-map-0.6.1 17144440 bytes
source-map-0.8.0 6867424 bytes
Smallest memory usage is @jridgewell/sourcemap-codec
Decode speed:
decode: @jridgewell/sourcemap-codec x 502 ops/sec ±1.03% (90 runs sampled)
decode: sourcemap-codec x 445 ops/sec ±0.97% (92 runs sampled)
decode: source-map-0.6.1 x 36.01 ops/sec ±1.64% (49 runs sampled)
decode: source-map-0.8.0 x 367 ops/sec ±0.04% (95 runs sampled)
Fastest is decode: @jridgewell/sourcemap-codec
Encode Memory Usage:
@jridgewell/sourcemap-codec 1261620 bytes
sourcemap-codec 9119248 bytes
source-map-0.6.1 8968560 bytes
source-map-0.8.0 8952952 bytes
Smallest memory usage is @jridgewell/sourcemap-codec
Encode speed:
encode: @jridgewell/sourcemap-codec x 738 ops/sec ±0.42% (98 runs sampled)
encode: sourcemap-codec x 238 ops/sec ±0.73% (88 runs sampled)
encode: source-map-0.6.1 x 162 ops/sec ±0.43% (84 runs sampled)
encode: source-map-0.8.0 x 191 ops/sec ±0.34% (90 runs sampled)
Fastest is encode: @jridgewell/sourcemap-codec
***
babel.min.js.map - 347793 segments
Decode Memory Usage:
@jridgewell/sourcemap-codec 35338184 bytes
sourcemap-codec 35922736 bytes
source-map-0.6.1 62366360 bytes
source-map-0.8.0 44337416 bytes
Smallest memory usage is @jridgewell/sourcemap-codec
Decode speed:
decode: @jridgewell/sourcemap-codec x 40.35 ops/sec ±4.47% (54 runs sampled)
decode: sourcemap-codec x 36.76 ops/sec ±3.67% (51 runs sampled)
decode: source-map-0.6.1 x 4.44 ops/sec ±2.15% (16 runs sampled)
decode: source-map-0.8.0 x 59.35 ops/sec ±0.05% (78 runs sampled)
Fastest is decode: source-map-0.8.0
Encode Memory Usage:
@jridgewell/sourcemap-codec 7212604 bytes
sourcemap-codec 21421456 bytes
source-map-0.6.1 25286888 bytes
source-map-0.8.0 25498744 bytes
Smallest memory usage is @jridgewell/sourcemap-codec
Encode speed:
encode: @jridgewell/sourcemap-codec x 112 ops/sec ±0.13% (84 runs sampled)
encode: sourcemap-codec x 30.23 ops/sec ±2.76% (53 runs sampled)
encode: source-map-0.6.1 x 19.43 ops/sec ±3.70% (37 runs sampled)
encode: source-map-0.8.0 x 19.40 ops/sec ±3.26% (37 runs sampled)
Fastest is encode: @jridgewell/sourcemap-codec
***
preact.js.map - 1992 segments
Decode Memory Usage:
@jridgewell/sourcemap-codec 500272 bytes
sourcemap-codec 516864 bytes
source-map-0.6.1 1596672 bytes
source-map-0.8.0 517272 bytes
Smallest memory usage is @jridgewell/sourcemap-codec
Decode speed:
decode: @jridgewell/sourcemap-codec x 16,137 ops/sec ±0.17% (99 runs sampled)
decode: sourcemap-codec x 12,139 ops/sec ±0.13% (99 runs sampled)
decode: source-map-0.6.1 x 1,264 ops/sec ±0.12% (100 runs sampled)
decode: source-map-0.8.0 x 9,894 ops/sec ±0.08% (101 runs sampled)
Fastest is decode: @jridgewell/sourcemap-codec
Encode Memory Usage:
@jridgewell/sourcemap-codec 321026 bytes
sourcemap-codec 830832 bytes
source-map-0.6.1 586608 bytes
source-map-0.8.0 586680 bytes
Smallest memory usage is @jridgewell/sourcemap-codec
Encode speed:
encode: @jridgewell/sourcemap-codec x 19,876 ops/sec ±0.78% (95 runs sampled)
encode: sourcemap-codec x 6,983 ops/sec ±0.15% (100 runs sampled)
encode: source-map-0.6.1 x 5,070 ops/sec ±0.12% (102 runs sampled)
encode: source-map-0.8.0 x 5,641 ops/sec ±0.17% (100 runs sampled)
Fastest is encode: @jridgewell/sourcemap-codec
***
react.js.map - 5726 segments
Decode Memory Usage:
@jridgewell/sourcemap-codec 734848 bytes
sourcemap-codec 954200 bytes
source-map-0.6.1 2276432 bytes
source-map-0.8.0 955488 bytes
Smallest memory usage is @jridgewell/sourcemap-codec
Decode speed:
decode: @jridgewell/sourcemap-codec x 5,723 ops/sec ±0.12% (98 runs sampled)
decode: sourcemap-codec x 4,555 ops/sec ±0.09% (101 runs sampled)
decode: source-map-0.6.1 x 437 ops/sec ±0.11% (93 runs sampled)
decode: source-map-0.8.0 x 3,441 ops/sec ±0.15% (100 runs sampled)
Fastest is decode: @jridgewell/sourcemap-codec
Encode Memory Usage:
@jridgewell/sourcemap-codec 638672 bytes
sourcemap-codec 1109840 bytes
source-map-0.6.1 1321224 bytes
source-map-0.8.0 1324448 bytes
Smallest memory usage is @jridgewell/sourcemap-codec
Encode speed:
encode: @jridgewell/sourcemap-codec x 6,801 ops/sec ±0.48% (98 runs sampled)
encode: sourcemap-codec x 2,533 ops/sec ±0.13% (101 runs sampled)
encode: source-map-0.6.1 x 2,248 ops/sec ±0.08% (100 runs sampled)
encode: source-map-0.8.0 x 2,303 ops/sec ±0.15% (100 runs sampled)
Fastest is encode: @jridgewell/sourcemap-codec
```
# License
MIT

View File

@ -1,164 +0,0 @@
const comma = ','.charCodeAt(0);
const semicolon = ';'.charCodeAt(0);
const chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/';
const intToChar = new Uint8Array(64); // 64 possible chars.
const charToInt = new Uint8Array(128); // z is 122 in ASCII
for (let i = 0; i < chars.length; i++) {
const c = chars.charCodeAt(i);
intToChar[i] = c;
charToInt[c] = i;
}
// Provide a fallback for older environments.
const td = typeof TextDecoder !== 'undefined'
? /* #__PURE__ */ new TextDecoder()
: typeof Buffer !== 'undefined'
? {
decode(buf) {
const out = Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength);
return out.toString();
},
}
: {
decode(buf) {
let out = '';
for (let i = 0; i < buf.length; i++) {
out += String.fromCharCode(buf[i]);
}
return out;
},
};
function decode(mappings) {
const state = new Int32Array(5);
const decoded = [];
let index = 0;
do {
const semi = indexOf(mappings, index);
const line = [];
let sorted = true;
let lastCol = 0;
state[0] = 0;
for (let i = index; i < semi; i++) {
let seg;
i = decodeInteger(mappings, i, state, 0); // genColumn
const col = state[0];
if (col < lastCol)
sorted = false;
lastCol = col;
if (hasMoreVlq(mappings, i, semi)) {
i = decodeInteger(mappings, i, state, 1); // sourcesIndex
i = decodeInteger(mappings, i, state, 2); // sourceLine
i = decodeInteger(mappings, i, state, 3); // sourceColumn
if (hasMoreVlq(mappings, i, semi)) {
i = decodeInteger(mappings, i, state, 4); // namesIndex
seg = [col, state[1], state[2], state[3], state[4]];
}
else {
seg = [col, state[1], state[2], state[3]];
}
}
else {
seg = [col];
}
line.push(seg);
}
if (!sorted)
sort(line);
decoded.push(line);
index = semi + 1;
} while (index <= mappings.length);
return decoded;
}
function indexOf(mappings, index) {
const idx = mappings.indexOf(';', index);
return idx === -1 ? mappings.length : idx;
}
function decodeInteger(mappings, pos, state, j) {
let value = 0;
let shift = 0;
let integer = 0;
do {
const c = mappings.charCodeAt(pos++);
integer = charToInt[c];
value |= (integer & 31) << shift;
shift += 5;
} while (integer & 32);
const shouldNegate = value & 1;
value >>>= 1;
if (shouldNegate) {
value = -0x80000000 | -value;
}
state[j] += value;
return pos;
}
function hasMoreVlq(mappings, i, length) {
if (i >= length)
return false;
return mappings.charCodeAt(i) !== comma;
}
function sort(line) {
line.sort(sortComparator);
}
function sortComparator(a, b) {
return a[0] - b[0];
}
function encode(decoded) {
const state = new Int32Array(5);
const bufLength = 1024 * 16;
const subLength = bufLength - 36;
const buf = new Uint8Array(bufLength);
const sub = buf.subarray(0, subLength);
let pos = 0;
let out = '';
for (let i = 0; i < decoded.length; i++) {
const line = decoded[i];
if (i > 0) {
if (pos === bufLength) {
out += td.decode(buf);
pos = 0;
}
buf[pos++] = semicolon;
}
if (line.length === 0)
continue;
state[0] = 0;
for (let j = 0; j < line.length; j++) {
const segment = line[j];
// We can push up to 5 ints, each int can take at most 7 chars, and we
// may push a comma.
if (pos > subLength) {
out += td.decode(sub);
buf.copyWithin(0, subLength, pos);
pos -= subLength;
}
if (j > 0)
buf[pos++] = comma;
pos = encodeInteger(buf, pos, state, segment, 0); // genColumn
if (segment.length === 1)
continue;
pos = encodeInteger(buf, pos, state, segment, 1); // sourcesIndex
pos = encodeInteger(buf, pos, state, segment, 2); // sourceLine
pos = encodeInteger(buf, pos, state, segment, 3); // sourceColumn
if (segment.length === 4)
continue;
pos = encodeInteger(buf, pos, state, segment, 4); // namesIndex
}
}
return out + td.decode(buf.subarray(0, pos));
}
function encodeInteger(buf, pos, state, segment, j) {
const next = segment[j];
let num = next - state[j];
state[j] = next;
num = num < 0 ? (-num << 1) | 1 : num << 1;
do {
let clamped = num & 0b011111;
num >>>= 5;
if (num > 0)
clamped |= 0b100000;
buf[pos++] = intToChar[clamped];
} while (num > 0);
return pos;
}
export { decode, encode };
//# sourceMappingURL=sourcemap-codec.mjs.map

File diff suppressed because one or more lines are too long

View File

@ -1,175 +0,0 @@
(function (global, factory) {
typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) :
typeof define === 'function' && define.amd ? define(['exports'], factory) :
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.sourcemapCodec = {}));
})(this, (function (exports) { 'use strict';
const comma = ','.charCodeAt(0);
const semicolon = ';'.charCodeAt(0);
const chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/';
const intToChar = new Uint8Array(64); // 64 possible chars.
const charToInt = new Uint8Array(128); // z is 122 in ASCII
for (let i = 0; i < chars.length; i++) {
const c = chars.charCodeAt(i);
intToChar[i] = c;
charToInt[c] = i;
}
// Provide a fallback for older environments.
const td = typeof TextDecoder !== 'undefined'
? /* #__PURE__ */ new TextDecoder()
: typeof Buffer !== 'undefined'
? {
decode(buf) {
const out = Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength);
return out.toString();
},
}
: {
decode(buf) {
let out = '';
for (let i = 0; i < buf.length; i++) {
out += String.fromCharCode(buf[i]);
}
return out;
},
};
function decode(mappings) {
const state = new Int32Array(5);
const decoded = [];
let index = 0;
do {
const semi = indexOf(mappings, index);
const line = [];
let sorted = true;
let lastCol = 0;
state[0] = 0;
for (let i = index; i < semi; i++) {
let seg;
i = decodeInteger(mappings, i, state, 0); // genColumn
const col = state[0];
if (col < lastCol)
sorted = false;
lastCol = col;
if (hasMoreVlq(mappings, i, semi)) {
i = decodeInteger(mappings, i, state, 1); // sourcesIndex
i = decodeInteger(mappings, i, state, 2); // sourceLine
i = decodeInteger(mappings, i, state, 3); // sourceColumn
if (hasMoreVlq(mappings, i, semi)) {
i = decodeInteger(mappings, i, state, 4); // namesIndex
seg = [col, state[1], state[2], state[3], state[4]];
}
else {
seg = [col, state[1], state[2], state[3]];
}
}
else {
seg = [col];
}
line.push(seg);
}
if (!sorted)
sort(line);
decoded.push(line);
index = semi + 1;
} while (index <= mappings.length);
return decoded;
}
function indexOf(mappings, index) {
const idx = mappings.indexOf(';', index);
return idx === -1 ? mappings.length : idx;
}
function decodeInteger(mappings, pos, state, j) {
let value = 0;
let shift = 0;
let integer = 0;
do {
const c = mappings.charCodeAt(pos++);
integer = charToInt[c];
value |= (integer & 31) << shift;
shift += 5;
} while (integer & 32);
const shouldNegate = value & 1;
value >>>= 1;
if (shouldNegate) {
value = -0x80000000 | -value;
}
state[j] += value;
return pos;
}
function hasMoreVlq(mappings, i, length) {
if (i >= length)
return false;
return mappings.charCodeAt(i) !== comma;
}
function sort(line) {
line.sort(sortComparator);
}
function sortComparator(a, b) {
return a[0] - b[0];
}
function encode(decoded) {
const state = new Int32Array(5);
const bufLength = 1024 * 16;
const subLength = bufLength - 36;
const buf = new Uint8Array(bufLength);
const sub = buf.subarray(0, subLength);
let pos = 0;
let out = '';
for (let i = 0; i < decoded.length; i++) {
const line = decoded[i];
if (i > 0) {
if (pos === bufLength) {
out += td.decode(buf);
pos = 0;
}
buf[pos++] = semicolon;
}
if (line.length === 0)
continue;
state[0] = 0;
for (let j = 0; j < line.length; j++) {
const segment = line[j];
// We can push up to 5 ints, each int can take at most 7 chars, and we
// may push a comma.
if (pos > subLength) {
out += td.decode(sub);
buf.copyWithin(0, subLength, pos);
pos -= subLength;
}
if (j > 0)
buf[pos++] = comma;
pos = encodeInteger(buf, pos, state, segment, 0); // genColumn
if (segment.length === 1)
continue;
pos = encodeInteger(buf, pos, state, segment, 1); // sourcesIndex
pos = encodeInteger(buf, pos, state, segment, 2); // sourceLine
pos = encodeInteger(buf, pos, state, segment, 3); // sourceColumn
if (segment.length === 4)
continue;
pos = encodeInteger(buf, pos, state, segment, 4); // namesIndex
}
}
return out + td.decode(buf.subarray(0, pos));
}
function encodeInteger(buf, pos, state, segment, j) {
const next = segment[j];
let num = next - state[j];
state[j] = next;
num = num < 0 ? (-num << 1) | 1 : num << 1;
do {
let clamped = num & 0b011111;
num >>>= 5;
if (num > 0)
clamped |= 0b100000;
buf[pos++] = intToChar[clamped];
} while (num > 0);
return pos;
}
exports.decode = decode;
exports.encode = encode;
Object.defineProperty(exports, '__esModule', { value: true });
}));
//# sourceMappingURL=sourcemap-codec.umd.js.map

File diff suppressed because one or more lines are too long

View File

@ -1,6 +0,0 @@
export declare type SourceMapSegment = [number] | [number, number, number, number] | [number, number, number, number, number];
export declare type SourceMapLine = SourceMapSegment[];
export declare type SourceMapMappings = SourceMapLine[];
export declare function decode(mappings: string): SourceMapMappings;
export declare function encode(decoded: SourceMapMappings): string;
export declare function encode(decoded: Readonly<SourceMapMappings>): string;

View File

@ -1,75 +0,0 @@
{
"name": "@jridgewell/sourcemap-codec",
"version": "1.4.14",
"description": "Encode/decode sourcemap mappings",
"keywords": [
"sourcemap",
"vlq"
],
"main": "dist/sourcemap-codec.umd.js",
"module": "dist/sourcemap-codec.mjs",
"typings": "dist/types/sourcemap-codec.d.ts",
"files": [
"dist",
"src"
],
"exports": {
".": [
{
"types": "./dist/types/sourcemap-codec.d.ts",
"browser": "./dist/sourcemap-codec.umd.js",
"import": "./dist/sourcemap-codec.mjs",
"require": "./dist/sourcemap-codec.umd.js"
},
"./dist/sourcemap-codec.umd.js"
],
"./package.json": "./package.json"
},
"scripts": {
"benchmark": "run-s build:rollup benchmark:*",
"benchmark:install": "cd benchmark && npm install",
"benchmark:only": "node --expose-gc benchmark/index.js",
"build": "run-s -n build:*",
"build:rollup": "rollup -c rollup.config.js",
"build:ts": "tsc --project tsconfig.build.json",
"lint": "run-s -n lint:*",
"lint:prettier": "npm run test:lint:prettier -- --write",
"lint:ts": "npm run test:lint:ts -- --fix",
"prebuild": "rm -rf dist",
"prepublishOnly": "npm run preversion",
"preversion": "run-s test build",
"pretest": "run-s build:rollup",
"test": "run-s -n test:lint test:only",
"test:debug": "mocha --inspect-brk",
"test:lint": "run-s -n test:lint:*",
"test:lint:prettier": "prettier --check '{src,test}/**/*.ts'",
"test:lint:ts": "eslint '{src,test}/**/*.ts'",
"test:only": "mocha",
"test:coverage": "c8 mocha",
"test:watch": "mocha --watch"
},
"repository": {
"type": "git",
"url": "git+https://github.com/jridgewell/sourcemap-codec.git"
},
"author": "Rich Harris",
"license": "MIT",
"devDependencies": {
"@rollup/plugin-typescript": "8.3.0",
"@types/node": "17.0.15",
"@typescript-eslint/eslint-plugin": "5.10.0",
"@typescript-eslint/parser": "5.10.0",
"benchmark": "2.1.4",
"c8": "7.11.2",
"eslint": "8.7.0",
"eslint-config-prettier": "8.3.0",
"mocha": "9.2.0",
"npm-run-all": "4.1.5",
"prettier": "2.5.1",
"rollup": "2.64.0",
"source-map": "0.6.1",
"source-map-js": "1.0.2",
"sourcemap-codec": "1.4.8",
"typescript": "4.5.4"
}
}

View File

@ -1,198 +0,0 @@
export type SourceMapSegment =
| [number]
| [number, number, number, number]
| [number, number, number, number, number];
export type SourceMapLine = SourceMapSegment[];
export type SourceMapMappings = SourceMapLine[];
const comma = ','.charCodeAt(0);
const semicolon = ';'.charCodeAt(0);
const chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/';
const intToChar = new Uint8Array(64); // 64 possible chars.
const charToInt = new Uint8Array(128); // z is 122 in ASCII
for (let i = 0; i < chars.length; i++) {
const c = chars.charCodeAt(i);
intToChar[i] = c;
charToInt[c] = i;
}
// Provide a fallback for older environments.
const td =
typeof TextDecoder !== 'undefined'
? /* #__PURE__ */ new TextDecoder()
: typeof Buffer !== 'undefined'
? {
decode(buf: Uint8Array) {
const out = Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength);
return out.toString();
},
}
: {
decode(buf: Uint8Array) {
let out = '';
for (let i = 0; i < buf.length; i++) {
out += String.fromCharCode(buf[i]);
}
return out;
},
};
export function decode(mappings: string): SourceMapMappings {
const state: [number, number, number, number, number] = new Int32Array(5) as any;
const decoded: SourceMapMappings = [];
let index = 0;
do {
const semi = indexOf(mappings, index);
const line: SourceMapLine = [];
let sorted = true;
let lastCol = 0;
state[0] = 0;
for (let i = index; i < semi; i++) {
let seg: SourceMapSegment;
i = decodeInteger(mappings, i, state, 0); // genColumn
const col = state[0];
if (col < lastCol) sorted = false;
lastCol = col;
if (hasMoreVlq(mappings, i, semi)) {
i = decodeInteger(mappings, i, state, 1); // sourcesIndex
i = decodeInteger(mappings, i, state, 2); // sourceLine
i = decodeInteger(mappings, i, state, 3); // sourceColumn
if (hasMoreVlq(mappings, i, semi)) {
i = decodeInteger(mappings, i, state, 4); // namesIndex
seg = [col, state[1], state[2], state[3], state[4]];
} else {
seg = [col, state[1], state[2], state[3]];
}
} else {
seg = [col];
}
line.push(seg);
}
if (!sorted) sort(line);
decoded.push(line);
index = semi + 1;
} while (index <= mappings.length);
return decoded;
}
function indexOf(mappings: string, index: number): number {
const idx = mappings.indexOf(';', index);
return idx === -1 ? mappings.length : idx;
}
function decodeInteger(mappings: string, pos: number, state: SourceMapSegment, j: number): number {
let value = 0;
let shift = 0;
let integer = 0;
do {
const c = mappings.charCodeAt(pos++);
integer = charToInt[c];
value |= (integer & 31) << shift;
shift += 5;
} while (integer & 32);
const shouldNegate = value & 1;
value >>>= 1;
if (shouldNegate) {
value = -0x80000000 | -value;
}
state[j] += value;
return pos;
}
function hasMoreVlq(mappings: string, i: number, length: number): boolean {
if (i >= length) return false;
return mappings.charCodeAt(i) !== comma;
}
function sort(line: SourceMapSegment[]) {
line.sort(sortComparator);
}
function sortComparator(a: SourceMapSegment, b: SourceMapSegment): number {
return a[0] - b[0];
}
export function encode(decoded: SourceMapMappings): string;
export function encode(decoded: Readonly<SourceMapMappings>): string;
export function encode(decoded: Readonly<SourceMapMappings>): string {
const state: [number, number, number, number, number] = new Int32Array(5) as any;
const bufLength = 1024 * 16;
const subLength = bufLength - 36;
const buf = new Uint8Array(bufLength);
const sub = buf.subarray(0, subLength);
let pos = 0;
let out = '';
for (let i = 0; i < decoded.length; i++) {
const line = decoded[i];
if (i > 0) {
if (pos === bufLength) {
out += td.decode(buf);
pos = 0;
}
buf[pos++] = semicolon;
}
if (line.length === 0) continue;
state[0] = 0;
for (let j = 0; j < line.length; j++) {
const segment = line[j];
// We can push up to 5 ints, each int can take at most 7 chars, and we
// may push a comma.
if (pos > subLength) {
out += td.decode(sub);
buf.copyWithin(0, subLength, pos);
pos -= subLength;
}
if (j > 0) buf[pos++] = comma;
pos = encodeInteger(buf, pos, state, segment, 0); // genColumn
if (segment.length === 1) continue;
pos = encodeInteger(buf, pos, state, segment, 1); // sourcesIndex
pos = encodeInteger(buf, pos, state, segment, 2); // sourceLine
pos = encodeInteger(buf, pos, state, segment, 3); // sourceColumn
if (segment.length === 4) continue;
pos = encodeInteger(buf, pos, state, segment, 4); // namesIndex
}
}
return out + td.decode(buf.subarray(0, pos));
}
function encodeInteger(
buf: Uint8Array,
pos: number,
state: SourceMapSegment,
segment: SourceMapSegment,
j: number,
): number {
const next = segment[j];
let num = next - state[j];
state[j] = next;
num = num < 0 ? (-num << 1) | 1 : num << 1;
do {
let clamped = num & 0b011111;
num >>>= 5;
if (num > 0) clamped |= 0b100000;
buf[pos++] = intToChar[clamped];
} while (num > 0);
return pos;
}

View File

@ -1,19 +0,0 @@
Copyright 2022 Justin Ridgewell <justin@ridgewell.name>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@ -1,252 +0,0 @@
# @jridgewell/trace-mapping
> Trace the original position through a source map
`trace-mapping` allows you to take the line and column of an output file and trace it to the
original location in the source file through a source map.
You may already be familiar with the [`source-map`][source-map] package's `SourceMapConsumer`. This
provides the same `originalPositionFor` and `generatedPositionFor` API, without requiring WASM.
## Installation
```sh
npm install @jridgewell/trace-mapping
```
## Usage
```typescript
import {
TraceMap,
originalPositionFor,
generatedPositionFor,
sourceContentFor,
} from '@jridgewell/trace-mapping';
const tracer = new TraceMap({
version: 3,
sources: ['input.js'],
sourcesContent: ['content of input.js'],
names: ['foo'],
mappings: 'KAyCIA',
});
// Lines start at line 1, columns at column 0.
const traced = originalPositionFor(tracer, { line: 1, column: 5 });
assert.deepEqual(traced, {
source: 'input.js',
line: 42,
column: 4,
name: 'foo',
});
const content = sourceContentFor(tracer, traced.source);
assert.strictEqual(content, 'content of input.js');
const generated = generatedPositionFor(tracer, {
source: 'input.js',
line: 42,
column: 4,
});
assert.deepEqual(generated, {
line: 1,
column: 5,
});
```
We also provide a lower level API to get the actual segment that matches our line and column. Unlike
`originalPositionFor`, `traceSegment` uses a 0-base for `line`:
```typescript
import { traceSegment } from '@jridgewell/trace-mapping';
// line is 0-base.
const traced = traceSegment(tracer, /* line */ 0, /* column */ 5);
// Segments are [outputColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex]
// Again, line is 0-base and so is sourceLine
assert.deepEqual(traced, [5, 0, 41, 4, 0]);
```
### SectionedSourceMaps
The sourcemap spec defines a special `sections` field that's designed to handle concatenation of
output code with associated sourcemaps. This type of sourcemap is rarely used (no major build tool
produces it), but if you are hand coding a concatenation you may need it. We provide an `AnyMap`
helper that can receive either a regular sourcemap or a `SectionedSourceMap` and returns a
`TraceMap` instance:
```typescript
import { AnyMap } from '@jridgewell/trace-mapping';
const fooOutput = 'foo';
const barOutput = 'bar';
const output = [fooOutput, barOutput].join('\n');
const sectioned = new AnyMap({
version: 3,
sections: [
{
// 0-base line and column
offset: { line: 0, column: 0 },
// fooOutput's sourcemap
map: {
version: 3,
sources: ['foo.js'],
names: ['foo'],
mappings: 'AAAAA',
},
},
{
// barOutput's sourcemap will not affect the first line, only the second
offset: { line: 1, column: 0 },
map: {
version: 3,
sources: ['bar.js'],
names: ['bar'],
mappings: 'AAAAA',
},
},
],
});
const traced = originalPositionFor(sectioned, {
line: 2,
column: 0,
});
assert.deepEqual(traced, {
source: 'bar.js',
line: 1,
column: 0,
name: 'bar',
});
```
## Benchmarks
```
node v18.0.0
amp.js.map - 45120 segments
Memory Usage:
trace-mapping decoded 562400 bytes
trace-mapping encoded 5706544 bytes
source-map-js 10717664 bytes
source-map-0.6.1 17446384 bytes
source-map-0.8.0 9701757 bytes
Smallest memory usage is trace-mapping decoded
Init speed:
trace-mapping: decoded JSON input x 180 ops/sec ±0.34% (85 runs sampled)
trace-mapping: encoded JSON input x 364 ops/sec ±1.77% (89 runs sampled)
trace-mapping: decoded Object input x 3,116 ops/sec ±0.50% (96 runs sampled)
trace-mapping: encoded Object input x 410 ops/sec ±2.62% (85 runs sampled)
source-map-js: encoded Object input x 84.23 ops/sec ±0.91% (73 runs sampled)
source-map-0.6.1: encoded Object input x 37.21 ops/sec ±2.08% (51 runs sampled)
Fastest is trace-mapping: decoded Object input
Trace speed:
trace-mapping: decoded originalPositionFor x 3,952,212 ops/sec ±0.17% (98 runs sampled)
trace-mapping: encoded originalPositionFor x 3,487,468 ops/sec ±1.58% (90 runs sampled)
source-map-js: encoded originalPositionFor x 827,730 ops/sec ±0.78% (97 runs sampled)
source-map-0.6.1: encoded originalPositionFor x 748,991 ops/sec ±0.53% (94 runs sampled)
source-map-0.8.0: encoded originalPositionFor x 2,532,894 ops/sec ±0.57% (95 runs sampled)
Fastest is trace-mapping: decoded originalPositionFor
***
babel.min.js.map - 347793 segments
Memory Usage:
trace-mapping decoded 89832 bytes
trace-mapping encoded 35474640 bytes
source-map-js 51257176 bytes
source-map-0.6.1 63515664 bytes
source-map-0.8.0 42933752 bytes
Smallest memory usage is trace-mapping decoded
Init speed:
trace-mapping: decoded JSON input x 15.41 ops/sec ±8.65% (34 runs sampled)
trace-mapping: encoded JSON input x 28.20 ops/sec ±12.87% (42 runs sampled)
trace-mapping: decoded Object input x 964 ops/sec ±0.36% (99 runs sampled)
trace-mapping: encoded Object input x 31.77 ops/sec ±13.79% (45 runs sampled)
source-map-js: encoded Object input x 6.45 ops/sec ±5.16% (21 runs sampled)
source-map-0.6.1: encoded Object input x 4.07 ops/sec ±5.24% (15 runs sampled)
Fastest is trace-mapping: decoded Object input
Trace speed:
trace-mapping: decoded originalPositionFor x 7,183,038 ops/sec ±0.58% (95 runs sampled)
trace-mapping: encoded originalPositionFor x 5,192,185 ops/sec ±0.41% (100 runs sampled)
source-map-js: encoded originalPositionFor x 4,259,489 ops/sec ±0.79% (94 runs sampled)
source-map-0.6.1: encoded originalPositionFor x 3,742,629 ops/sec ±0.71% (95 runs sampled)
source-map-0.8.0: encoded originalPositionFor x 6,270,211 ops/sec ±0.64% (94 runs sampled)
Fastest is trace-mapping: decoded originalPositionFor
***
preact.js.map - 1992 segments
Memory Usage:
trace-mapping decoded 37128 bytes
trace-mapping encoded 247280 bytes
source-map-js 1143536 bytes
source-map-0.6.1 1290992 bytes
source-map-0.8.0 96544 bytes
Smallest memory usage is trace-mapping decoded
Init speed:
trace-mapping: decoded JSON input x 3,483 ops/sec ±0.30% (98 runs sampled)
trace-mapping: encoded JSON input x 6,092 ops/sec ±0.18% (97 runs sampled)
trace-mapping: decoded Object input x 249,076 ops/sec ±0.24% (98 runs sampled)
trace-mapping: encoded Object input x 14,555 ops/sec ±0.48% (100 runs sampled)
source-map-js: encoded Object input x 2,447 ops/sec ±0.36% (99 runs sampled)
source-map-0.6.1: encoded Object input x 1,201 ops/sec ±0.57% (96 runs sampled)
Fastest is trace-mapping: decoded Object input
Trace speed:
trace-mapping: decoded originalPositionFor x 7,620,192 ops/sec ±0.09% (99 runs sampled)
trace-mapping: encoded originalPositionFor x 6,872,554 ops/sec ±0.30% (97 runs sampled)
source-map-js: encoded originalPositionFor x 2,489,570 ops/sec ±0.35% (94 runs sampled)
source-map-0.6.1: encoded originalPositionFor x 1,698,633 ops/sec ±0.28% (98 runs sampled)
source-map-0.8.0: encoded originalPositionFor x 4,015,644 ops/sec ±0.22% (98 runs sampled)
Fastest is trace-mapping: decoded originalPositionFor
***
react.js.map - 5726 segments
Memory Usage:
trace-mapping decoded 16176 bytes
trace-mapping encoded 681552 bytes
source-map-js 2418352 bytes
source-map-0.6.1 2443672 bytes
source-map-0.8.0 111768 bytes
Smallest memory usage is trace-mapping decoded
Init speed:
trace-mapping: decoded JSON input x 1,720 ops/sec ±0.34% (98 runs sampled)
trace-mapping: encoded JSON input x 4,406 ops/sec ±0.35% (100 runs sampled)
trace-mapping: decoded Object input x 92,122 ops/sec ±0.10% (99 runs sampled)
trace-mapping: encoded Object input x 5,385 ops/sec ±0.37% (99 runs sampled)
source-map-js: encoded Object input x 794 ops/sec ±0.40% (98 runs sampled)
source-map-0.6.1: encoded Object input x 416 ops/sec ±0.54% (91 runs sampled)
Fastest is trace-mapping: decoded Object input
Trace speed:
trace-mapping: decoded originalPositionFor x 32,759,519 ops/sec ±0.33% (100 runs sampled)
trace-mapping: encoded originalPositionFor x 31,116,306 ops/sec ±0.33% (97 runs sampled)
source-map-js: encoded originalPositionFor x 17,458,435 ops/sec ±0.44% (97 runs sampled)
source-map-0.6.1: encoded originalPositionFor x 12,687,097 ops/sec ±0.43% (95 runs sampled)
source-map-0.8.0: encoded originalPositionFor x 23,538,275 ops/sec ±0.38% (95 runs sampled)
Fastest is trace-mapping: decoded originalPositionFor
```
[source-map]: https://www.npmjs.com/package/source-map

View File

@ -1,511 +0,0 @@
import { encode, decode } from '@jridgewell/sourcemap-codec';
import resolveUri from '@jridgewell/resolve-uri';
function resolve(input, base) {
// The base is always treated as a directory, if it's not empty.
// https://github.com/mozilla/source-map/blob/8cb3ee57/lib/util.js#L327
// https://github.com/chromium/chromium/blob/da4adbb3/third_party/blink/renderer/devtools/front_end/sdk/SourceMap.js#L400-L401
if (base && !base.endsWith('/'))
base += '/';
return resolveUri(input, base);
}
/**
* Removes everything after the last "/", but leaves the slash.
*/
function stripFilename(path) {
if (!path)
return '';
const index = path.lastIndexOf('/');
return path.slice(0, index + 1);
}
const COLUMN = 0;
const SOURCES_INDEX = 1;
const SOURCE_LINE = 2;
const SOURCE_COLUMN = 3;
const NAMES_INDEX = 4;
const REV_GENERATED_LINE = 1;
const REV_GENERATED_COLUMN = 2;
function maybeSort(mappings, owned) {
const unsortedIndex = nextUnsortedSegmentLine(mappings, 0);
if (unsortedIndex === mappings.length)
return mappings;
// If we own the array (meaning we parsed it from JSON), then we're free to directly mutate it. If
// not, we do not want to modify the consumer's input array.
if (!owned)
mappings = mappings.slice();
for (let i = unsortedIndex; i < mappings.length; i = nextUnsortedSegmentLine(mappings, i + 1)) {
mappings[i] = sortSegments(mappings[i], owned);
}
return mappings;
}
function nextUnsortedSegmentLine(mappings, start) {
for (let i = start; i < mappings.length; i++) {
if (!isSorted(mappings[i]))
return i;
}
return mappings.length;
}
function isSorted(line) {
for (let j = 1; j < line.length; j++) {
if (line[j][COLUMN] < line[j - 1][COLUMN]) {
return false;
}
}
return true;
}
function sortSegments(line, owned) {
if (!owned)
line = line.slice();
return line.sort(sortComparator);
}
function sortComparator(a, b) {
return a[COLUMN] - b[COLUMN];
}
let found = false;
/**
* A binary search implementation that returns the index if a match is found.
* If no match is found, then the left-index (the index associated with the item that comes just
* before the desired index) is returned. To maintain proper sort order, a splice would happen at
* the next index:
*
* ```js
* const array = [1, 3];
* const needle = 2;
* const index = binarySearch(array, needle, (item, needle) => item - needle);
*
* assert.equal(index, 0);
* array.splice(index + 1, 0, needle);
* assert.deepEqual(array, [1, 2, 3]);
* ```
*/
function binarySearch(haystack, needle, low, high) {
while (low <= high) {
const mid = low + ((high - low) >> 1);
const cmp = haystack[mid][COLUMN] - needle;
if (cmp === 0) {
found = true;
return mid;
}
if (cmp < 0) {
low = mid + 1;
}
else {
high = mid - 1;
}
}
found = false;
return low - 1;
}
function upperBound(haystack, needle, index) {
for (let i = index + 1; i < haystack.length; index = i++) {
if (haystack[i][COLUMN] !== needle)
break;
}
return index;
}
function lowerBound(haystack, needle, index) {
for (let i = index - 1; i >= 0; index = i--) {
if (haystack[i][COLUMN] !== needle)
break;
}
return index;
}
function memoizedState() {
return {
lastKey: -1,
lastNeedle: -1,
lastIndex: -1,
};
}
/**
* This overly complicated beast is just to record the last tested line/column and the resulting
* index, allowing us to skip a few tests if mappings are monotonically increasing.
*/
function memoizedBinarySearch(haystack, needle, state, key) {
const { lastKey, lastNeedle, lastIndex } = state;
let low = 0;
let high = haystack.length - 1;
if (key === lastKey) {
if (needle === lastNeedle) {
found = lastIndex !== -1 && haystack[lastIndex][COLUMN] === needle;
return lastIndex;
}
if (needle >= lastNeedle) {
// lastIndex may be -1 if the previous needle was not found.
low = lastIndex === -1 ? 0 : lastIndex;
}
else {
high = lastIndex;
}
}
state.lastKey = key;
state.lastNeedle = needle;
return (state.lastIndex = binarySearch(haystack, needle, low, high));
}
// Rebuilds the original source files, with mappings that are ordered by source line/column instead
// of generated line/column.
function buildBySources(decoded, memos) {
const sources = memos.map(buildNullArray);
for (let i = 0; i < decoded.length; i++) {
const line = decoded[i];
for (let j = 0; j < line.length; j++) {
const seg = line[j];
if (seg.length === 1)
continue;
const sourceIndex = seg[SOURCES_INDEX];
const sourceLine = seg[SOURCE_LINE];
const sourceColumn = seg[SOURCE_COLUMN];
const originalSource = sources[sourceIndex];
const originalLine = (originalSource[sourceLine] || (originalSource[sourceLine] = []));
const memo = memos[sourceIndex];
// The binary search either found a match, or it found the left-index just before where the
// segment should go. Either way, we want to insert after that. And there may be multiple
// generated segments associated with an original location, so there may need to move several
// indexes before we find where we need to insert.
const index = upperBound(originalLine, sourceColumn, memoizedBinarySearch(originalLine, sourceColumn, memo, sourceLine));
insert(originalLine, (memo.lastIndex = index + 1), [sourceColumn, i, seg[COLUMN]]);
}
}
return sources;
}
function insert(array, index, value) {
for (let i = array.length; i > index; i--) {
array[i] = array[i - 1];
}
array[index] = value;
}
// Null arrays allow us to use ordered index keys without actually allocating contiguous memory like
// a real array. We use a null-prototype object to avoid prototype pollution and deoptimizations.
// Numeric properties on objects are magically sorted in ascending order by the engine regardless of
// the insertion order. So, by setting any numeric keys, even out of order, we'll get ascending
// order when iterating with for-in.
function buildNullArray() {
return { __proto__: null };
}
const AnyMap = function (map, mapUrl) {
const parsed = typeof map === 'string' ? JSON.parse(map) : map;
if (!('sections' in parsed))
return new TraceMap(parsed, mapUrl);
const mappings = [];
const sources = [];
const sourcesContent = [];
const names = [];
recurse(parsed, mapUrl, mappings, sources, sourcesContent, names, 0, 0, Infinity, Infinity);
const joined = {
version: 3,
file: parsed.file,
names,
sources,
sourcesContent,
mappings,
};
return presortedDecodedMap(joined);
};
function recurse(input, mapUrl, mappings, sources, sourcesContent, names, lineOffset, columnOffset, stopLine, stopColumn) {
const { sections } = input;
for (let i = 0; i < sections.length; i++) {
const { map, offset } = sections[i];
let sl = stopLine;
let sc = stopColumn;
if (i + 1 < sections.length) {
const nextOffset = sections[i + 1].offset;
sl = Math.min(stopLine, lineOffset + nextOffset.line);
if (sl === stopLine) {
sc = Math.min(stopColumn, columnOffset + nextOffset.column);
}
else if (sl < stopLine) {
sc = columnOffset + nextOffset.column;
}
}
addSection(map, mapUrl, mappings, sources, sourcesContent, names, lineOffset + offset.line, columnOffset + offset.column, sl, sc);
}
}
function addSection(input, mapUrl, mappings, sources, sourcesContent, names, lineOffset, columnOffset, stopLine, stopColumn) {
if ('sections' in input)
return recurse(...arguments);
const map = new TraceMap(input, mapUrl);
const sourcesOffset = sources.length;
const namesOffset = names.length;
const decoded = decodedMappings(map);
const { resolvedSources, sourcesContent: contents } = map;
append(sources, resolvedSources);
append(names, map.names);
if (contents)
append(sourcesContent, contents);
else
for (let i = 0; i < resolvedSources.length; i++)
sourcesContent.push(null);
for (let i = 0; i < decoded.length; i++) {
const lineI = lineOffset + i;
// We can only add so many lines before we step into the range that the next section's map
// controls. When we get to the last line, then we'll start checking the segments to see if
// they've crossed into the column range. But it may not have any columns that overstep, so we
// still need to check that we don't overstep lines, too.
if (lineI > stopLine)
return;
// The out line may already exist in mappings (if we're continuing the line started by a
// previous section). Or, we may have jumped ahead several lines to start this section.
const out = getLine(mappings, lineI);
// On the 0th loop, the section's column offset shifts us forward. On all other lines (since the
// map can be multiple lines), it doesn't.
const cOffset = i === 0 ? columnOffset : 0;
const line = decoded[i];
for (let j = 0; j < line.length; j++) {
const seg = line[j];
const column = cOffset + seg[COLUMN];
// If this segment steps into the column range that the next section's map controls, we need
// to stop early.
if (lineI === stopLine && column >= stopColumn)
return;
if (seg.length === 1) {
out.push([column]);
continue;
}
const sourcesIndex = sourcesOffset + seg[SOURCES_INDEX];
const sourceLine = seg[SOURCE_LINE];
const sourceColumn = seg[SOURCE_COLUMN];
out.push(seg.length === 4
? [column, sourcesIndex, sourceLine, sourceColumn]
: [column, sourcesIndex, sourceLine, sourceColumn, namesOffset + seg[NAMES_INDEX]]);
}
}
}
function append(arr, other) {
for (let i = 0; i < other.length; i++)
arr.push(other[i]);
}
function getLine(arr, index) {
for (let i = arr.length; i <= index; i++)
arr[i] = [];
return arr[index];
}
const LINE_GTR_ZERO = '`line` must be greater than 0 (lines start at line 1)';
const COL_GTR_EQ_ZERO = '`column` must be greater than or equal to 0 (columns start at column 0)';
const LEAST_UPPER_BOUND = -1;
const GREATEST_LOWER_BOUND = 1;
/**
* Returns the encoded (VLQ string) form of the SourceMap's mappings field.
*/
let encodedMappings;
/**
* Returns the decoded (array of lines of segments) form of the SourceMap's mappings field.
*/
let decodedMappings;
/**
* A low-level API to find the segment associated with a generated line/column (think, from a
* stack trace). Line and column here are 0-based, unlike `originalPositionFor`.
*/
let traceSegment;
/**
* A higher-level API to find the source/line/column associated with a generated line/column
* (think, from a stack trace). Line is 1-based, but column is 0-based, due to legacy behavior in
* `source-map` library.
*/
let originalPositionFor;
/**
 * Finds the generated line/column position of the provided source/line/column source position.
 */
let generatedPositionFor;
/**
* Iterates each mapping in generated position order.
*/
let eachMapping;
/**
 * Retrieves the source content for a particular source, if it's found. Returns null if not.
*/
let sourceContentFor;
/**
* A helper that skips sorting of the input map's mappings array, which can be expensive for larger
* maps.
*/
let presortedDecodedMap;
/**
* Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
* a sourcemap, or to JSON.stringify.
*/
let decodedMap;
/**
* Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
* a sourcemap, or to JSON.stringify.
*/
let encodedMap;
class TraceMap {
constructor(map, mapUrl) {
const isString = typeof map === 'string';
if (!isString && map._decodedMemo)
return map;
const parsed = (isString ? JSON.parse(map) : map);
const { version, file, names, sourceRoot, sources, sourcesContent } = parsed;
this.version = version;
this.file = file;
this.names = names;
this.sourceRoot = sourceRoot;
this.sources = sources;
this.sourcesContent = sourcesContent;
const from = resolve(sourceRoot || '', stripFilename(mapUrl));
this.resolvedSources = sources.map((s) => resolve(s || '', from));
const { mappings } = parsed;
if (typeof mappings === 'string') {
this._encoded = mappings;
this._decoded = undefined;
}
else {
this._encoded = undefined;
this._decoded = maybeSort(mappings, isString);
}
this._decodedMemo = memoizedState();
this._bySources = undefined;
this._bySourceMemos = undefined;
}
}
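// Usage sketch (illustrative comments, not part of the built file; variable names are placeholders):
//   import { TraceMap, originalPositionFor } from '@jridgewell/trace-mapping';
//   const tracer = new TraceMap(rawMap); // accepts an encoded/decoded map object or a JSON string
//   originalPositionFor(tracer, { line: 1, column: 5 });
//   // => { source, line, column, name } where `line` is 1-based and `column` is 0-based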
(() => {
encodedMappings = (map) => {
var _a;
return ((_a = map._encoded) !== null && _a !== void 0 ? _a : (map._encoded = encode(map._decoded)));
};
decodedMappings = (map) => {
return (map._decoded || (map._decoded = decode(map._encoded)));
};
traceSegment = (map, line, column) => {
const decoded = decodedMappings(map);
// It's common for parent source maps to have pointers to lines that have no
// mapping (like a "//# sourceMappingURL=") at the end of the child file.
if (line >= decoded.length)
return null;
return traceSegmentInternal(decoded[line], map._decodedMemo, line, column, GREATEST_LOWER_BOUND);
};
originalPositionFor = (map, { line, column, bias }) => {
line--;
if (line < 0)
throw new Error(LINE_GTR_ZERO);
if (column < 0)
throw new Error(COL_GTR_EQ_ZERO);
const decoded = decodedMappings(map);
// It's common for parent source maps to have pointers to lines that have no
// mapping (like a "//# sourceMappingURL=") at the end of the child file.
if (line >= decoded.length)
return OMapping(null, null, null, null);
const segment = traceSegmentInternal(decoded[line], map._decodedMemo, line, column, bias || GREATEST_LOWER_BOUND);
if (segment == null)
return OMapping(null, null, null, null);
if (segment.length == 1)
return OMapping(null, null, null, null);
const { names, resolvedSources } = map;
return OMapping(resolvedSources[segment[SOURCES_INDEX]], segment[SOURCE_LINE] + 1, segment[SOURCE_COLUMN], segment.length === 5 ? names[segment[NAMES_INDEX]] : null);
};
generatedPositionFor = (map, { source, line, column, bias }) => {
line--;
if (line < 0)
throw new Error(LINE_GTR_ZERO);
if (column < 0)
throw new Error(COL_GTR_EQ_ZERO);
const { sources, resolvedSources } = map;
let sourceIndex = sources.indexOf(source);
if (sourceIndex === -1)
sourceIndex = resolvedSources.indexOf(source);
if (sourceIndex === -1)
return GMapping(null, null);
const generated = (map._bySources || (map._bySources = buildBySources(decodedMappings(map), (map._bySourceMemos = sources.map(memoizedState)))));
const memos = map._bySourceMemos;
const segments = generated[sourceIndex][line];
if (segments == null)
return GMapping(null, null);
const segment = traceSegmentInternal(segments, memos[sourceIndex], line, column, bias || GREATEST_LOWER_BOUND);
if (segment == null)
return GMapping(null, null);
return GMapping(segment[REV_GENERATED_LINE] + 1, segment[REV_GENERATED_COLUMN]);
};
eachMapping = (map, cb) => {
const decoded = decodedMappings(map);
const { names, resolvedSources } = map;
for (let i = 0; i < decoded.length; i++) {
const line = decoded[i];
for (let j = 0; j < line.length; j++) {
const seg = line[j];
const generatedLine = i + 1;
const generatedColumn = seg[0];
let source = null;
let originalLine = null;
let originalColumn = null;
let name = null;
if (seg.length !== 1) {
source = resolvedSources[seg[1]];
originalLine = seg[2] + 1;
originalColumn = seg[3];
}
if (seg.length === 5)
name = names[seg[4]];
cb({
generatedLine,
generatedColumn,
source,
originalLine,
originalColumn,
name,
});
}
}
};
sourceContentFor = (map, source) => {
const { sources, resolvedSources, sourcesContent } = map;
if (sourcesContent == null)
return null;
let index = sources.indexOf(source);
if (index === -1)
index = resolvedSources.indexOf(source);
return index === -1 ? null : sourcesContent[index];
};
presortedDecodedMap = (map, mapUrl) => {
const tracer = new TraceMap(clone(map, []), mapUrl);
tracer._decoded = map.mappings;
return tracer;
};
decodedMap = (map) => {
return clone(map, decodedMappings(map));
};
encodedMap = (map) => {
return clone(map, encodedMappings(map));
};
})();
function clone(map, mappings) {
return {
version: map.version,
file: map.file,
names: map.names,
sourceRoot: map.sourceRoot,
sources: map.sources,
sourcesContent: map.sourcesContent,
mappings,
};
}
function OMapping(source, line, column, name) {
return { source, line, column, name };
}
function GMapping(line, column) {
return { line, column };
}
function traceSegmentInternal(segments, memo, line, column, bias) {
let index = memoizedBinarySearch(segments, column, memo, line);
if (found) {
index = (bias === LEAST_UPPER_BOUND ? upperBound : lowerBound)(segments, column, index);
}
else if (bias === LEAST_UPPER_BOUND)
index++;
if (index === -1 || index === segments.length)
return null;
return segments[index];
}
export { AnyMap, GREATEST_LOWER_BOUND, LEAST_UPPER_BOUND, TraceMap, decodedMap, decodedMappings, eachMapping, encodedMap, encodedMappings, generatedPositionFor, originalPositionFor, presortedDecodedMap, sourceContentFor, traceSegment };
//# sourceMappingURL=trace-mapping.mjs.map

File diff suppressed because one or more lines are too long

View File

@ -1,525 +0,0 @@
(function (global, factory) {
typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports, require('@jridgewell/sourcemap-codec'), require('@jridgewell/resolve-uri')) :
typeof define === 'function' && define.amd ? define(['exports', '@jridgewell/sourcemap-codec', '@jridgewell/resolve-uri'], factory) :
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.traceMapping = {}, global.sourcemapCodec, global.resolveURI));
})(this, (function (exports, sourcemapCodec, resolveUri) { 'use strict';
function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }
var resolveUri__default = /*#__PURE__*/_interopDefaultLegacy(resolveUri);
function resolve(input, base) {
// The base is always treated as a directory, if it's not empty.
// https://github.com/mozilla/source-map/blob/8cb3ee57/lib/util.js#L327
// https://github.com/chromium/chromium/blob/da4adbb3/third_party/blink/renderer/devtools/front_end/sdk/SourceMap.js#L400-L401
if (base && !base.endsWith('/'))
base += '/';
return resolveUri__default["default"](input, base);
}
/**
* Removes everything after the last "/", but leaves the slash.
*/
function stripFilename(path) {
if (!path)
return '';
const index = path.lastIndexOf('/');
return path.slice(0, index + 1);
}
const COLUMN = 0;
const SOURCES_INDEX = 1;
const SOURCE_LINE = 2;
const SOURCE_COLUMN = 3;
const NAMES_INDEX = 4;
const REV_GENERATED_LINE = 1;
const REV_GENERATED_COLUMN = 2;
function maybeSort(mappings, owned) {
const unsortedIndex = nextUnsortedSegmentLine(mappings, 0);
if (unsortedIndex === mappings.length)
return mappings;
// If we own the array (meaning we parsed it from JSON), then we're free to directly mutate it. If
// not, we do not want to modify the consumer's input array.
if (!owned)
mappings = mappings.slice();
for (let i = unsortedIndex; i < mappings.length; i = nextUnsortedSegmentLine(mappings, i + 1)) {
mappings[i] = sortSegments(mappings[i], owned);
}
return mappings;
}
function nextUnsortedSegmentLine(mappings, start) {
for (let i = start; i < mappings.length; i++) {
if (!isSorted(mappings[i]))
return i;
}
return mappings.length;
}
function isSorted(line) {
for (let j = 1; j < line.length; j++) {
if (line[j][COLUMN] < line[j - 1][COLUMN]) {
return false;
}
}
return true;
}
function sortSegments(line, owned) {
if (!owned)
line = line.slice();
return line.sort(sortComparator);
}
function sortComparator(a, b) {
return a[COLUMN] - b[COLUMN];
}
let found = false;
/**
* A binary search implementation that returns the index if a match is found.
* If no match is found, then the left-index (the index associated with the item that comes just
* before the desired index) is returned. To maintain proper sort order, a splice would happen at
* the next index:
*
* ```js
* const array = [1, 3];
* const needle = 2;
* const index = binarySearch(array, needle, (item, needle) => item - needle);
*
* assert.equal(index, 0);
* array.splice(index + 1, 0, needle);
* assert.deepEqual(array, [1, 2, 3]);
* ```
*/
function binarySearch(haystack, needle, low, high) {
while (low <= high) {
const mid = low + ((high - low) >> 1);
const cmp = haystack[mid][COLUMN] - needle;
if (cmp === 0) {
found = true;
return mid;
}
if (cmp < 0) {
low = mid + 1;
}
else {
high = mid - 1;
}
}
found = false;
return low - 1;
}
function upperBound(haystack, needle, index) {
for (let i = index + 1; i < haystack.length; index = i++) {
if (haystack[i][COLUMN] !== needle)
break;
}
return index;
}
function lowerBound(haystack, needle, index) {
for (let i = index - 1; i >= 0; index = i--) {
if (haystack[i][COLUMN] !== needle)
break;
}
return index;
}
function memoizedState() {
return {
lastKey: -1,
lastNeedle: -1,
lastIndex: -1,
};
}
/**
* This overly complicated beast is just to record the last tested line/column and the resulting
* index, allowing us to skip a few tests if mappings are monotonically increasing.
*/
function memoizedBinarySearch(haystack, needle, state, key) {
const { lastKey, lastNeedle, lastIndex } = state;
let low = 0;
let high = haystack.length - 1;
if (key === lastKey) {
if (needle === lastNeedle) {
found = lastIndex !== -1 && haystack[lastIndex][COLUMN] === needle;
return lastIndex;
}
if (needle >= lastNeedle) {
// lastIndex may be -1 if the previous needle was not found.
low = lastIndex === -1 ? 0 : lastIndex;
}
else {
high = lastIndex;
}
}
state.lastKey = key;
state.lastNeedle = needle;
return (state.lastIndex = binarySearch(haystack, needle, low, high));
}
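// Illustrative sketch (not part of the original source): repeated lookups on the same line with
// non-decreasing columns reuse the previous result as the lower bound, e.g.
//   memoizedBinarySearch(segments, 5, state, 0); // full binary search
//   memoizedBinarySearch(segments, 9, state, 0); // only searches indexes at or above the result for 5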
// Rebuilds the original source files, with mappings that are ordered by source line/column instead
// of generated line/column.
function buildBySources(decoded, memos) {
const sources = memos.map(buildNullArray);
for (let i = 0; i < decoded.length; i++) {
const line = decoded[i];
for (let j = 0; j < line.length; j++) {
const seg = line[j];
if (seg.length === 1)
continue;
const sourceIndex = seg[SOURCES_INDEX];
const sourceLine = seg[SOURCE_LINE];
const sourceColumn = seg[SOURCE_COLUMN];
const originalSource = sources[sourceIndex];
const originalLine = (originalSource[sourceLine] || (originalSource[sourceLine] = []));
const memo = memos[sourceIndex];
// The binary search either found a match, or it found the left-index just before where the
// segment should go. Either way, we want to insert after that. And there may be multiple
// generated segments associated with an original location, so we may need to move over several
// indexes before we find where we need to insert.
const index = upperBound(originalLine, sourceColumn, memoizedBinarySearch(originalLine, sourceColumn, memo, sourceLine));
insert(originalLine, (memo.lastIndex = index + 1), [sourceColumn, i, seg[COLUMN]]);
}
}
return sources;
}
function insert(array, index, value) {
for (let i = array.length; i > index; i--) {
array[i] = array[i - 1];
}
array[index] = value;
}
// Null arrays allow us to use ordered index keys without actually allocating contiguous memory like
// a real array. We use a null-prototype object to avoid prototype pollution and deoptimizations.
// Numeric properties on objects are magically sorted in ascending order by the engine regardless of
// the insertion order. So, by setting any numeric keys, even out of order, we'll get ascending
// order when iterating with for-in.
function buildNullArray() {
return { __proto__: null };
}
const AnyMap = function (map, mapUrl) {
const parsed = typeof map === 'string' ? JSON.parse(map) : map;
if (!('sections' in parsed))
return new TraceMap(parsed, mapUrl);
const mappings = [];
const sources = [];
const sourcesContent = [];
const names = [];
recurse(parsed, mapUrl, mappings, sources, sourcesContent, names, 0, 0, Infinity, Infinity);
const joined = {
version: 3,
file: parsed.file,
names,
sources,
sourcesContent,
mappings,
};
return exports.presortedDecodedMap(joined);
};
function recurse(input, mapUrl, mappings, sources, sourcesContent, names, lineOffset, columnOffset, stopLine, stopColumn) {
const { sections } = input;
for (let i = 0; i < sections.length; i++) {
const { map, offset } = sections[i];
let sl = stopLine;
let sc = stopColumn;
if (i + 1 < sections.length) {
const nextOffset = sections[i + 1].offset;
sl = Math.min(stopLine, lineOffset + nextOffset.line);
if (sl === stopLine) {
sc = Math.min(stopColumn, columnOffset + nextOffset.column);
}
else if (sl < stopLine) {
sc = columnOffset + nextOffset.column;
}
}
addSection(map, mapUrl, mappings, sources, sourcesContent, names, lineOffset + offset.line, columnOffset + offset.column, sl, sc);
}
}
function addSection(input, mapUrl, mappings, sources, sourcesContent, names, lineOffset, columnOffset, stopLine, stopColumn) {
if ('sections' in input)
return recurse(...arguments);
const map = new TraceMap(input, mapUrl);
const sourcesOffset = sources.length;
const namesOffset = names.length;
const decoded = exports.decodedMappings(map);
const { resolvedSources, sourcesContent: contents } = map;
append(sources, resolvedSources);
append(names, map.names);
if (contents)
append(sourcesContent, contents);
else
for (let i = 0; i < resolvedSources.length; i++)
sourcesContent.push(null);
for (let i = 0; i < decoded.length; i++) {
const lineI = lineOffset + i;
// We can only add so many lines before we step into the range that the next section's map
// controls. When we get to the last line, then we'll start checking the segments to see if
// they've crossed into the column range. But it may not have any columns that overstep, so we
// still need to check that we don't overstep lines, too.
if (lineI > stopLine)
return;
// The out line may already exist in mappings (if we're continuing the line started by a
// previous section). Or, we may have jumped ahead several lines to start this section.
const out = getLine(mappings, lineI);
// On the 0th loop, the section's column offset shifts us forward. On all other lines (since the
// map can be multiple lines), it doesn't.
const cOffset = i === 0 ? columnOffset : 0;
const line = decoded[i];
for (let j = 0; j < line.length; j++) {
const seg = line[j];
const column = cOffset + seg[COLUMN];
// If this segment steps into the column range that the next section's map controls, we need
// to stop early.
if (lineI === stopLine && column >= stopColumn)
return;
if (seg.length === 1) {
out.push([column]);
continue;
}
const sourcesIndex = sourcesOffset + seg[SOURCES_INDEX];
const sourceLine = seg[SOURCE_LINE];
const sourceColumn = seg[SOURCE_COLUMN];
out.push(seg.length === 4
? [column, sourcesIndex, sourceLine, sourceColumn]
: [column, sourcesIndex, sourceLine, sourceColumn, namesOffset + seg[NAMES_INDEX]]);
}
}
}
function append(arr, other) {
for (let i = 0; i < other.length; i++)
arr.push(other[i]);
}
function getLine(arr, index) {
for (let i = arr.length; i <= index; i++)
arr[i] = [];
return arr[index];
}
const LINE_GTR_ZERO = '`line` must be greater than 0 (lines start at line 1)';
const COL_GTR_EQ_ZERO = '`column` must be greater than or equal to 0 (columns start at column 0)';
const LEAST_UPPER_BOUND = -1;
const GREATEST_LOWER_BOUND = 1;
/**
* Returns the encoded (VLQ string) form of the SourceMap's mappings field.
*/
exports.encodedMappings = void 0;
/**
* Returns the decoded (array of lines of segments) form of the SourceMap's mappings field.
*/
exports.decodedMappings = void 0;
/**
* A low-level API to find the segment associated with a generated line/column (think, from a
* stack trace). Line and column here are 0-based, unlike `originalPositionFor`.
*/
exports.traceSegment = void 0;
/**
* A higher-level API to find the source/line/column associated with a generated line/column
* (think, from a stack trace). Line is 1-based, but column is 0-based, due to legacy behavior in
* `source-map` library.
*/
exports.originalPositionFor = void 0;
/**
 * Finds the generated line/column position of the provided source/line/column source position.
 */
exports.generatedPositionFor = void 0;
/**
* Iterates each mapping in generated position order.
*/
exports.eachMapping = void 0;
/**
 * Retrieves the source content for a particular source, if it's found. Returns null if not.
*/
exports.sourceContentFor = void 0;
/**
* A helper that skips sorting of the input map's mappings array, which can be expensive for larger
* maps.
*/
exports.presortedDecodedMap = void 0;
/**
* Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
* a sourcemap, or to JSON.stringify.
*/
exports.decodedMap = void 0;
/**
* Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
* a sourcemap, or to JSON.stringify.
*/
exports.encodedMap = void 0;
class TraceMap {
constructor(map, mapUrl) {
const isString = typeof map === 'string';
if (!isString && map._decodedMemo)
return map;
const parsed = (isString ? JSON.parse(map) : map);
const { version, file, names, sourceRoot, sources, sourcesContent } = parsed;
this.version = version;
this.file = file;
this.names = names;
this.sourceRoot = sourceRoot;
this.sources = sources;
this.sourcesContent = sourcesContent;
const from = resolve(sourceRoot || '', stripFilename(mapUrl));
this.resolvedSources = sources.map((s) => resolve(s || '', from));
const { mappings } = parsed;
if (typeof mappings === 'string') {
this._encoded = mappings;
this._decoded = undefined;
}
else {
this._encoded = undefined;
this._decoded = maybeSort(mappings, isString);
}
this._decodedMemo = memoizedState();
this._bySources = undefined;
this._bySourceMemos = undefined;
}
}
(() => {
exports.encodedMappings = (map) => {
var _a;
return ((_a = map._encoded) !== null && _a !== void 0 ? _a : (map._encoded = sourcemapCodec.encode(map._decoded)));
};
exports.decodedMappings = (map) => {
return (map._decoded || (map._decoded = sourcemapCodec.decode(map._encoded)));
};
exports.traceSegment = (map, line, column) => {
const decoded = exports.decodedMappings(map);
// It's common for parent source maps to have pointers to lines that have no
// mapping (like a "//# sourceMappingURL=") at the end of the child file.
if (line >= decoded.length)
return null;
return traceSegmentInternal(decoded[line], map._decodedMemo, line, column, GREATEST_LOWER_BOUND);
};
exports.originalPositionFor = (map, { line, column, bias }) => {
line--;
if (line < 0)
throw new Error(LINE_GTR_ZERO);
if (column < 0)
throw new Error(COL_GTR_EQ_ZERO);
const decoded = exports.decodedMappings(map);
// It's common for parent source maps to have pointers to lines that have no
// mapping (like a "//# sourceMappingURL=") at the end of the child file.
if (line >= decoded.length)
return OMapping(null, null, null, null);
const segment = traceSegmentInternal(decoded[line], map._decodedMemo, line, column, bias || GREATEST_LOWER_BOUND);
if (segment == null)
return OMapping(null, null, null, null);
if (segment.length == 1)
return OMapping(null, null, null, null);
const { names, resolvedSources } = map;
return OMapping(resolvedSources[segment[SOURCES_INDEX]], segment[SOURCE_LINE] + 1, segment[SOURCE_COLUMN], segment.length === 5 ? names[segment[NAMES_INDEX]] : null);
};
exports.generatedPositionFor = (map, { source, line, column, bias }) => {
line--;
if (line < 0)
throw new Error(LINE_GTR_ZERO);
if (column < 0)
throw new Error(COL_GTR_EQ_ZERO);
const { sources, resolvedSources } = map;
let sourceIndex = sources.indexOf(source);
if (sourceIndex === -1)
sourceIndex = resolvedSources.indexOf(source);
if (sourceIndex === -1)
return GMapping(null, null);
const generated = (map._bySources || (map._bySources = buildBySources(exports.decodedMappings(map), (map._bySourceMemos = sources.map(memoizedState)))));
const memos = map._bySourceMemos;
const segments = generated[sourceIndex][line];
if (segments == null)
return GMapping(null, null);
const segment = traceSegmentInternal(segments, memos[sourceIndex], line, column, bias || GREATEST_LOWER_BOUND);
if (segment == null)
return GMapping(null, null);
return GMapping(segment[REV_GENERATED_LINE] + 1, segment[REV_GENERATED_COLUMN]);
};
exports.eachMapping = (map, cb) => {
const decoded = exports.decodedMappings(map);
const { names, resolvedSources } = map;
for (let i = 0; i < decoded.length; i++) {
const line = decoded[i];
for (let j = 0; j < line.length; j++) {
const seg = line[j];
const generatedLine = i + 1;
const generatedColumn = seg[0];
let source = null;
let originalLine = null;
let originalColumn = null;
let name = null;
if (seg.length !== 1) {
source = resolvedSources[seg[1]];
originalLine = seg[2] + 1;
originalColumn = seg[3];
}
if (seg.length === 5)
name = names[seg[4]];
cb({
generatedLine,
generatedColumn,
source,
originalLine,
originalColumn,
name,
});
}
}
};
exports.sourceContentFor = (map, source) => {
const { sources, resolvedSources, sourcesContent } = map;
if (sourcesContent == null)
return null;
let index = sources.indexOf(source);
if (index === -1)
index = resolvedSources.indexOf(source);
return index === -1 ? null : sourcesContent[index];
};
exports.presortedDecodedMap = (map, mapUrl) => {
const tracer = new TraceMap(clone(map, []), mapUrl);
tracer._decoded = map.mappings;
return tracer;
};
exports.decodedMap = (map) => {
return clone(map, exports.decodedMappings(map));
};
exports.encodedMap = (map) => {
return clone(map, exports.encodedMappings(map));
};
})();
function clone(map, mappings) {
return {
version: map.version,
file: map.file,
names: map.names,
sourceRoot: map.sourceRoot,
sources: map.sources,
sourcesContent: map.sourcesContent,
mappings,
};
}
function OMapping(source, line, column, name) {
return { source, line, column, name };
}
function GMapping(line, column) {
return { line, column };
}
function traceSegmentInternal(segments, memo, line, column, bias) {
let index = memoizedBinarySearch(segments, column, memo, line);
if (found) {
index = (bias === LEAST_UPPER_BOUND ? upperBound : lowerBound)(segments, column, index);
}
else if (bias === LEAST_UPPER_BOUND)
index++;
if (index === -1 || index === segments.length)
return null;
return segments[index];
}
exports.AnyMap = AnyMap;
exports.GREATEST_LOWER_BOUND = GREATEST_LOWER_BOUND;
exports.LEAST_UPPER_BOUND = LEAST_UPPER_BOUND;
exports.TraceMap = TraceMap;
Object.defineProperty(exports, '__esModule', { value: true });
}));
//# sourceMappingURL=trace-mapping.umd.js.map

File diff suppressed because one or more lines are too long

View File

@ -1,8 +0,0 @@
import { TraceMap } from './trace-mapping';
import type { SectionedSourceMapInput } from './types';
declare type AnyMap = {
new (map: SectionedSourceMapInput, mapUrl?: string | null): TraceMap;
(map: SectionedSourceMapInput, mapUrl?: string | null): TraceMap;
};
export declare const AnyMap: AnyMap;
export {};

View File

@ -1,32 +0,0 @@
import type { SourceMapSegment, ReverseSegment } from './sourcemap-segment';
export declare type MemoState = {
lastKey: number;
lastNeedle: number;
lastIndex: number;
};
export declare let found: boolean;
/**
* A binary search implementation that returns the index if a match is found.
* If no match is found, then the left-index (the index associated with the item that comes just
* before the desired index) is returned. To maintain proper sort order, a splice would happen at
* the next index:
*
* ```js
* const array = [1, 3];
* const needle = 2;
* const index = binarySearch(array, needle, (item, needle) => item - needle);
*
* assert.equal(index, 0);
* array.splice(index + 1, 0, needle);
* assert.deepEqual(array, [1, 2, 3]);
* ```
*/
export declare function binarySearch(haystack: SourceMapSegment[] | ReverseSegment[], needle: number, low: number, high: number): number;
export declare function upperBound(haystack: SourceMapSegment[] | ReverseSegment[], needle: number, index: number): number;
export declare function lowerBound(haystack: SourceMapSegment[] | ReverseSegment[], needle: number, index: number): number;
export declare function memoizedState(): MemoState;
/**
* This overly complicated beast is just to record the last tested line/column and the resulting
* index, allowing us to skip a few tests if mappings are monotonically increasing.
*/
export declare function memoizedBinarySearch(haystack: SourceMapSegment[] | ReverseSegment[], needle: number, state: MemoState, key: number): number;

View File

@ -1,7 +0,0 @@
import type { ReverseSegment, SourceMapSegment } from './sourcemap-segment';
import type { MemoState } from './binary-search';
export declare type Source = {
__proto__: null;
[line: number]: Exclude<ReverseSegment, [number]>[];
};
export default function buildBySources(decoded: readonly SourceMapSegment[][], memos: MemoState[]): Source[];

View File

@ -1 +0,0 @@
export default function resolve(input: string, base: string | undefined): string;

View File

@ -1,2 +0,0 @@
import type { SourceMapSegment } from './sourcemap-segment';
export default function maybeSort(mappings: SourceMapSegment[][], owned: boolean): SourceMapSegment[][];

View File

@ -1,16 +0,0 @@
declare type GeneratedColumn = number;
declare type SourcesIndex = number;
declare type SourceLine = number;
declare type SourceColumn = number;
declare type NamesIndex = number;
declare type GeneratedLine = number;
export declare type SourceMapSegment = [GeneratedColumn] | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn] | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn, NamesIndex];
export declare type ReverseSegment = [SourceColumn, GeneratedLine, GeneratedColumn];
export declare const COLUMN = 0;
export declare const SOURCES_INDEX = 1;
export declare const SOURCE_LINE = 2;
export declare const SOURCE_COLUMN = 3;
export declare const NAMES_INDEX = 4;
export declare const REV_GENERATED_LINE = 1;
export declare const REV_GENERATED_COLUMN = 2;
export {};

View File

@ -1,4 +0,0 @@
/**
* Removes everything after the last "/", but leaves the slash.
*/
export default function stripFilename(path: string | undefined | null): string;

View File

@ -1,74 +0,0 @@
import type { SourceMapSegment } from './sourcemap-segment';
import type { SourceMapV3, DecodedSourceMap, EncodedSourceMap, InvalidOriginalMapping, OriginalMapping, InvalidGeneratedMapping, GeneratedMapping, SourceMapInput, Needle, SourceNeedle, SourceMap, EachMapping } from './types';
export type { SourceMapSegment } from './sourcemap-segment';
export type { SourceMapInput, SectionedSourceMapInput, DecodedSourceMap, EncodedSourceMap, SectionedSourceMap, InvalidOriginalMapping, OriginalMapping as Mapping, OriginalMapping, InvalidGeneratedMapping, GeneratedMapping, EachMapping, } from './types';
export declare const LEAST_UPPER_BOUND = -1;
export declare const GREATEST_LOWER_BOUND = 1;
/**
* Returns the encoded (VLQ string) form of the SourceMap's mappings field.
*/
export declare let encodedMappings: (map: TraceMap) => EncodedSourceMap['mappings'];
/**
* Returns the decoded (array of lines of segments) form of the SourceMap's mappings field.
*/
export declare let decodedMappings: (map: TraceMap) => Readonly<DecodedSourceMap['mappings']>;
/**
* A low-level API to find the segment associated with a generated line/column (think, from a
* stack trace). Line and column here are 0-based, unlike `originalPositionFor`.
*/
export declare let traceSegment: (map: TraceMap, line: number, column: number) => Readonly<SourceMapSegment> | null;
/**
* A higher-level API to find the source/line/column associated with a generated line/column
* (think, from a stack trace). Line is 1-based, but column is 0-based, due to legacy behavior in
* `source-map` library.
*/
export declare let originalPositionFor: (map: TraceMap, needle: Needle) => OriginalMapping | InvalidOriginalMapping;
/**
 * Finds the generated line/column position of the provided source/line/column source position.
 */
export declare let generatedPositionFor: (map: TraceMap, needle: SourceNeedle) => GeneratedMapping | InvalidGeneratedMapping;
/**
* Iterates each mapping in generated position order.
*/
export declare let eachMapping: (map: TraceMap, cb: (mapping: EachMapping) => void) => void;
/**
 * Retrieves the source content for a particular source, if it's found. Returns null if not.
*/
export declare let sourceContentFor: (map: TraceMap, source: string) => string | null;
/**
* A helper that skips sorting of the input map's mappings array, which can be expensive for larger
* maps.
*/
export declare let presortedDecodedMap: (map: DecodedSourceMap, mapUrl?: string) => TraceMap;
/**
* Returns a sourcemap object (with decoded mappings) suitable for passing to a library that expects
* a sourcemap, or to JSON.stringify.
*/
export declare let decodedMap: (map: TraceMap) => Omit<DecodedSourceMap, 'mappings'> & {
mappings: readonly SourceMapSegment[][];
};
/**
* Returns a sourcemap object (with encoded mappings) suitable for passing to a library that expects
* a sourcemap, or to JSON.stringify.
*/
export declare let encodedMap: (map: TraceMap) => EncodedSourceMap;
export { AnyMap } from './any-map';
export declare class TraceMap implements SourceMap {
version: SourceMapV3['version'];
file: SourceMapV3['file'];
names: SourceMapV3['names'];
sourceRoot: SourceMapV3['sourceRoot'];
sources: SourceMapV3['sources'];
sourcesContent: SourceMapV3['sourcesContent'];
resolvedSources: string[];
private _encoded;
private _decoded;
private _decodedMemo;
private _bySources;
private _bySourceMemos;
constructor(map: SourceMapInput, mapUrl?: string | null);
}

View File

@ -1,85 +0,0 @@
import type { SourceMapSegment } from './sourcemap-segment';
import type { TraceMap } from './trace-mapping';
export interface SourceMapV3 {
file?: string | null;
names: string[];
sourceRoot?: string;
sources: (string | null)[];
sourcesContent?: (string | null)[];
version: 3;
}
export interface EncodedSourceMap extends SourceMapV3 {
mappings: string;
}
export interface DecodedSourceMap extends SourceMapV3 {
mappings: SourceMapSegment[][];
}
export interface Section {
offset: {
line: number;
column: number;
};
map: EncodedSourceMap | DecodedSourceMap | SectionedSourceMap;
}
export interface SectionedSourceMap {
file?: string | null;
sections: Section[];
version: 3;
}
export declare type OriginalMapping = {
source: string | null;
line: number;
column: number;
name: string | null;
};
export declare type InvalidOriginalMapping = {
source: null;
line: null;
column: null;
name: null;
};
export declare type GeneratedMapping = {
line: number;
column: number;
};
export declare type InvalidGeneratedMapping = {
line: null;
column: null;
};
export declare type SourceMapInput = string | EncodedSourceMap | DecodedSourceMap | TraceMap;
export declare type SectionedSourceMapInput = SourceMapInput | SectionedSourceMap;
export declare type Needle = {
line: number;
column: number;
bias?: 1 | -1;
};
export declare type SourceNeedle = {
source: string;
line: number;
column: number;
bias?: 1 | -1;
};
export declare type EachMapping = {
generatedLine: number;
generatedColumn: number;
source: null;
originalLine: null;
originalColumn: null;
name: null;
} | {
generatedLine: number;
generatedColumn: number;
source: string | null;
originalLine: number;
originalColumn: number;
name: string | null;
};
export declare abstract class SourceMap {
version: SourceMapV3['version'];
file: SourceMapV3['file'];
names: SourceMapV3['names'];
sourceRoot: SourceMapV3['sourceRoot'];
sources: SourceMapV3['sources'];
sourcesContent: SourceMapV3['sourcesContent'];
resolvedSources: SourceMapV3['sources'];
}

View File

@ -1,75 +0,0 @@
{
"name": "@jridgewell/trace-mapping",
"version": "0.3.15",
"description": "Trace the original position through a source map",
"keywords": [
"source",
"map"
],
"main": "dist/trace-mapping.umd.js",
"module": "dist/trace-mapping.mjs",
"typings": "dist/types/trace-mapping.d.ts",
"files": [
"dist"
],
"exports": {
".": [
{
"types": "./dist/types/trace-mapping.d.ts",
"browser": "./dist/trace-mapping.umd.js",
"require": "./dist/trace-mapping.umd.js",
"import": "./dist/trace-mapping.mjs"
},
"./dist/trace-mapping.umd.js"
],
"./package.json": "./package.json"
},
"author": "Justin Ridgewell <justin@ridgewell.name>",
"repository": {
"type": "git",
"url": "git+https://github.com/jridgewell/trace-mapping.git"
},
"license": "MIT",
"scripts": {
"benchmark": "run-s build:rollup benchmark:*",
"benchmark:install": "cd benchmark && npm install",
"benchmark:only": "node --expose-gc benchmark/index.mjs",
"build": "run-s -n build:*",
"build:rollup": "rollup -c rollup.config.js",
"build:ts": "tsc --project tsconfig.build.json",
"lint": "run-s -n lint:*",
"lint:prettier": "npm run test:lint:prettier -- --write",
"lint:ts": "npm run test:lint:ts -- --fix",
"prebuild": "rm -rf dist",
"prepublishOnly": "npm run preversion",
"preversion": "run-s test build",
"test": "run-s -n test:lint test:only",
"test:debug": "ava debug",
"test:lint": "run-s -n test:lint:*",
"test:lint:prettier": "prettier --check '{src,test}/**/*.ts' '**/*.md'",
"test:lint:ts": "eslint '{src,test}/**/*.ts'",
"test:only": "c8 ava",
"test:watch": "ava --watch"
},
"devDependencies": {
"@rollup/plugin-typescript": "8.3.2",
"@typescript-eslint/eslint-plugin": "5.23.0",
"@typescript-eslint/parser": "5.23.0",
"ava": "4.2.0",
"benchmark": "2.1.4",
"c8": "7.11.2",
"esbuild": "0.14.38",
"esbuild-node-loader": "0.8.0",
"eslint": "8.15.0",
"eslint-config-prettier": "8.5.0",
"eslint-plugin-no-only-tests": "2.6.0",
"npm-run-all": "4.1.5",
"prettier": "2.6.2",
"rollup": "2.72.1",
"typescript": "4.6.4"
},
"dependencies": {
"@jridgewell/resolve-uri": "^3.0.3",
"@jridgewell/sourcemap-codec": "^1.4.10"
}
}

View File

@ -1,21 +0,0 @@
The MIT License (MIT)
Copyright (c) Denis Malinochkin
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@ -1,171 +0,0 @@
# @nodelib/fs.scandir
> List files and directories inside the specified directory.
## :bulb: Highlights
The package is aimed at obtaining information about entries in the directory.
* :moneybag: Returns useful information: `name`, `path`, `dirent` and `stats` (optional).
* :gear: On Node.js 10.10+ uses the mechanism without additional calls to determine the entry type. See [`old` and `modern` mode](#old-and-modern-mode).
* :link: Can safely work with broken symbolic links.
## Install
```console
npm install @nodelib/fs.scandir
```
## Usage
```ts
import * as fsScandir from '@nodelib/fs.scandir';
fsScandir.scandir('path', (error, stats) => { /* … */ });
```
## API
### .scandir(path, [optionsOrSettings], callback)
Returns an array of plain objects ([`Entry`](#entry)) with information about each entry for the provided path, using standard callback style.
```ts
fsScandir.scandir('path', (error, entries) => { /* … */ });
fsScandir.scandir('path', {}, (error, entries) => { /* … */ });
fsScandir.scandir('path', new fsScandir.Settings(), (error, entries) => { /* … */ });
```
### .scandirSync(path, [optionsOrSettings])
Returns an array of plain objects ([`Entry`](#entry)) with information about each entry for the provided path.
```ts
const entries = fsScandir.scandirSync('path');
const entries = fsScandir.scandirSync('path', {});
const entries = fsScandir.scandirSync('path', new fsScandir.Settings());
```
#### path
* Required: `true`
* Type: `string | Buffer | URL`
A path to a file. If a URL is provided, it must use the `file:` protocol.
#### optionsOrSettings
* Required: `false`
* Type: `Options | Settings`
* Default: An instance of `Settings` class
An [`Options`](#options) object or an instance of [`Settings`](#settingsoptions) class.
> :book: When you pass a plain object, an instance of the `Settings` class will be created automatically. If you plan to call the method frequently, use a pre-created instance of the `Settings` class.
### Settings([options])
A class of full settings of the package.
```ts
const settings = new fsScandir.Settings({ followSymbolicLinks: false });
const entries = fsScandir.scandirSync('path', settings);
```
## Entry
* `name` — The name of the entry (`unknown.txt`).
* `path` — The path of the entry relative to call directory (`root/unknown.txt`).
* `dirent` — An instance of the [`fs.Dirent`](./src/types/index.ts) class. On Node.js below 10.10, it is emulated by the [`DirentFromStats`](./src/utils/fs.ts) class.
* `stats` (optional) — An instance of `fs.Stats` class.
For example, the `scandir` call for `tools` directory with one directory inside:
```ts
{
dirent: Dirent { name: 'typedoc', /* … */ },
name: 'typedoc',
path: 'tools/typedoc'
}
```
## Options
### stats
* Type: `boolean`
* Default: `false`
Adds an instance of `fs.Stats` class to the [`Entry`](#entry).
> :book: When this option is enabled, the package always uses `fs.readdir` without the `withFileTypes` option and determines each entry's type with additional `fs.lstat`/`fs.stat` calls (the `old` mode described below).
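A minimal sketch of this option (illustrative only; `'path'` is a placeholder and the import from the Usage section is assumed):
```ts
const entries = fsScandir.scandirSync('path', { stats: true });
console.log(entries[0].stats); // an instance of `fs.Stats` for the first entry
```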
### followSymbolicLinks
* Type: `boolean`
* Default: `false`
Follow symbolic links or not. Call `fs.stat` on symbolic link if `true`.
### throwErrorOnBrokenSymbolicLink
* Type: `boolean`
* Default: `true`
Throw an error when symbolic link is broken if `true` or safely use `lstat` call if `false`.
### pathSegmentSeparator
* Type: `string`
* Default: `path.sep`
By default, this package uses the correct path separator for your OS (`\` on Windows, `/` on Unix-like systems). But you can set this option to any separator character(s) that you want to use instead.
### fs
* Type: [`FileSystemAdapter`](./src/adapters/fs.ts)
* Default: The built-in Node.js `fs` methods
By default, the built-in Node.js module (`fs`) is used to work with the file system. You can replace any method with your own.
```ts
interface FileSystemAdapter {
lstat?: typeof fs.lstat;
stat?: typeof fs.stat;
lstatSync?: typeof fs.lstatSync;
statSync?: typeof fs.statSync;
readdir?: typeof fs.readdir;
readdirSync?: typeof fs.readdirSync;
}
const settings = new fsScandir.Settings({
fs: { lstat: fakeLstat }
});
```
## `old` and `modern` mode
This package has two modes that are used depending on the environment and parameters of use.
### old
* Node.js below `10.10` or when the `stats` option is enabled
When working in the old mode, the directory is read first (`fs.readdir`), then the type of entries is determined (`fs.lstat` and/or `fs.stat` for symbolic links).
### modern
* Node.js 10.10+ and the `stats` option is disabled
In the modern mode, reading the directory (`fs.readdir` with the `withFileTypes` option) is combined with obtaining information about its entries. An additional call for symbolic links (`fs.stat`) is still present.
This mode makes fewer calls to the file system. It's faster.
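A minimal sketch contrasting the two modes from the caller's side (illustrative; `'path'` is a placeholder and the import from the Usage section is assumed):
```ts
// modern mode on Node.js 10.10+: a single readdir call with `withFileTypes`
const fast = fsScandir.scandirSync('path');
// old mode, forced here by requesting stats: each entry costs an extra lstat/stat call
const detailed = fsScandir.scandirSync('path', { stats: true });
console.log(fast.length === detailed.length); // same entries, different cost
```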
## Changelog
See the [Releases section of our GitHub project](https://github.com/nodelib/nodelib/releases) for changelog for each release version.
## License
This software is released under the terms of the MIT license.

View File

@ -1,20 +0,0 @@
import type * as fsStat from '@nodelib/fs.stat';
import type { Dirent, ErrnoException } from '../types';
export interface ReaddirAsynchronousMethod {
(filepath: string, options: {
withFileTypes: true;
}, callback: (error: ErrnoException | null, files: Dirent[]) => void): void;
(filepath: string, callback: (error: ErrnoException | null, files: string[]) => void): void;
}
export interface ReaddirSynchronousMethod {
(filepath: string, options: {
withFileTypes: true;
}): Dirent[];
(filepath: string): string[];
}
export declare type FileSystemAdapter = fsStat.FileSystemAdapter & {
readdir: ReaddirAsynchronousMethod;
readdirSync: ReaddirSynchronousMethod;
};
export declare const FILE_SYSTEM_ADAPTER: FileSystemAdapter;
export declare function createFileSystemAdapter(fsMethods?: Partial<FileSystemAdapter>): FileSystemAdapter;

View File

@ -1,19 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.createFileSystemAdapter = exports.FILE_SYSTEM_ADAPTER = void 0;
const fs = require("fs");
exports.FILE_SYSTEM_ADAPTER = {
lstat: fs.lstat,
stat: fs.stat,
lstatSync: fs.lstatSync,
statSync: fs.statSync,
readdir: fs.readdir,
readdirSync: fs.readdirSync
};
function createFileSystemAdapter(fsMethods) {
if (fsMethods === undefined) {
return exports.FILE_SYSTEM_ADAPTER;
}
return Object.assign(Object.assign({}, exports.FILE_SYSTEM_ADAPTER), fsMethods);
}
exports.createFileSystemAdapter = createFileSystemAdapter;

View File

@ -1,4 +0,0 @@
/**
 * Is `true` for Node.js 10.10 and greater.
*/
export declare const IS_SUPPORT_READDIR_WITH_FILE_TYPES: boolean;

View File

@ -1,17 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.IS_SUPPORT_READDIR_WITH_FILE_TYPES = void 0;
const NODE_PROCESS_VERSION_PARTS = process.versions.node.split('.');
if (NODE_PROCESS_VERSION_PARTS[0] === undefined || NODE_PROCESS_VERSION_PARTS[1] === undefined) {
throw new Error(`Unexpected behavior. The 'process.versions.node' variable has invalid value: ${process.versions.node}`);
}
const MAJOR_VERSION = Number.parseInt(NODE_PROCESS_VERSION_PARTS[0], 10);
const MINOR_VERSION = Number.parseInt(NODE_PROCESS_VERSION_PARTS[1], 10);
const SUPPORTED_MAJOR_VERSION = 10;
const SUPPORTED_MINOR_VERSION = 10;
const IS_MATCHED_BY_MAJOR = MAJOR_VERSION > SUPPORTED_MAJOR_VERSION;
const IS_MATCHED_BY_MAJOR_AND_MINOR = MAJOR_VERSION === SUPPORTED_MAJOR_VERSION && MINOR_VERSION >= SUPPORTED_MINOR_VERSION;
/**
 * Is `true` for Node.js 10.10 and greater.
*/
exports.IS_SUPPORT_READDIR_WITH_FILE_TYPES = IS_MATCHED_BY_MAJOR || IS_MATCHED_BY_MAJOR_AND_MINOR;

View File

@ -1,12 +0,0 @@
import type { FileSystemAdapter, ReaddirAsynchronousMethod, ReaddirSynchronousMethod } from './adapters/fs';
import * as async from './providers/async';
import Settings, { Options } from './settings';
import type { Dirent, Entry } from './types';
declare type AsyncCallback = async.AsyncCallback;
declare function scandir(path: string, callback: AsyncCallback): void;
declare function scandir(path: string, optionsOrSettings: Options | Settings, callback: AsyncCallback): void;
declare namespace scandir {
function __promisify__(path: string, optionsOrSettings?: Options | Settings): Promise<Entry[]>;
}
declare function scandirSync(path: string, optionsOrSettings?: Options | Settings): Entry[];
export { scandir, scandirSync, Settings, AsyncCallback, Dirent, Entry, FileSystemAdapter, ReaddirAsynchronousMethod, ReaddirSynchronousMethod, Options };

View File

@ -1,26 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.Settings = exports.scandirSync = exports.scandir = void 0;
const async = require("./providers/async");
const sync = require("./providers/sync");
const settings_1 = require("./settings");
exports.Settings = settings_1.default;
function scandir(path, optionsOrSettingsOrCallback, callback) {
if (typeof optionsOrSettingsOrCallback === 'function') {
async.read(path, getSettings(), optionsOrSettingsOrCallback);
return;
}
async.read(path, getSettings(optionsOrSettingsOrCallback), callback);
}
exports.scandir = scandir;
function scandirSync(path, optionsOrSettings) {
const settings = getSettings(optionsOrSettings);
return sync.read(path, settings);
}
exports.scandirSync = scandirSync;
function getSettings(settingsOrOptions = {}) {
if (settingsOrOptions instanceof settings_1.default) {
return settingsOrOptions;
}
return new settings_1.default(settingsOrOptions);
}

View File

@ -1,7 +0,0 @@
/// <reference types="node" />
import type Settings from '../settings';
import type { Entry } from '../types';
export declare type AsyncCallback = (error: NodeJS.ErrnoException, entries: Entry[]) => void;
export declare function read(directory: string, settings: Settings, callback: AsyncCallback): void;
export declare function readdirWithFileTypes(directory: string, settings: Settings, callback: AsyncCallback): void;
export declare function readdir(directory: string, settings: Settings, callback: AsyncCallback): void;

View File

@ -1,104 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.readdir = exports.readdirWithFileTypes = exports.read = void 0;
const fsStat = require("@nodelib/fs.stat");
const rpl = require("run-parallel");
const constants_1 = require("../constants");
const utils = require("../utils");
const common = require("./common");
function read(directory, settings, callback) {
if (!settings.stats && constants_1.IS_SUPPORT_READDIR_WITH_FILE_TYPES) {
readdirWithFileTypes(directory, settings, callback);
return;
}
readdir(directory, settings, callback);
}
exports.read = read;
function readdirWithFileTypes(directory, settings, callback) {
settings.fs.readdir(directory, { withFileTypes: true }, (readdirError, dirents) => {
if (readdirError !== null) {
callFailureCallback(callback, readdirError);
return;
}
const entries = dirents.map((dirent) => ({
dirent,
name: dirent.name,
path: common.joinPathSegments(directory, dirent.name, settings.pathSegmentSeparator)
}));
if (!settings.followSymbolicLinks) {
callSuccessCallback(callback, entries);
return;
}
const tasks = entries.map((entry) => makeRplTaskEntry(entry, settings));
rpl(tasks, (rplError, rplEntries) => {
if (rplError !== null) {
callFailureCallback(callback, rplError);
return;
}
callSuccessCallback(callback, rplEntries);
});
});
}
exports.readdirWithFileTypes = readdirWithFileTypes;
function makeRplTaskEntry(entry, settings) {
return (done) => {
if (!entry.dirent.isSymbolicLink()) {
done(null, entry);
return;
}
settings.fs.stat(entry.path, (statError, stats) => {
if (statError !== null) {
if (settings.throwErrorOnBrokenSymbolicLink) {
done(statError);
return;
}
done(null, entry);
return;
}
entry.dirent = utils.fs.createDirentFromStats(entry.name, stats);
done(null, entry);
});
};
}
function readdir(directory, settings, callback) {
settings.fs.readdir(directory, (readdirError, names) => {
if (readdirError !== null) {
callFailureCallback(callback, readdirError);
return;
}
const tasks = names.map((name) => {
const path = common.joinPathSegments(directory, name, settings.pathSegmentSeparator);
return (done) => {
fsStat.stat(path, settings.fsStatSettings, (error, stats) => {
if (error !== null) {
done(error);
return;
}
const entry = {
name,
path,
dirent: utils.fs.createDirentFromStats(name, stats)
};
if (settings.stats) {
entry.stats = stats;
}
done(null, entry);
});
};
});
rpl(tasks, (rplError, entries) => {
if (rplError !== null) {
callFailureCallback(callback, rplError);
return;
}
callSuccessCallback(callback, entries);
});
});
}
exports.readdir = readdir;
function callFailureCallback(callback, error) {
callback(error);
}
function callSuccessCallback(callback, result) {
callback(null, result);
}

View File

@ -1 +0,0 @@
export declare function joinPathSegments(a: string, b: string, separator: string): string;

View File

@ -1,13 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.joinPathSegments = void 0;
function joinPathSegments(a, b, separator) {
/**
* The correct handling of cases when the first segment is a root (`/`, `C:/`) or UNC path (`//?/C:/`).
*/
if (a.endsWith(separator)) {
return a + b;
}
return a + separator + b;
}
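// Illustrative examples (not part of the original source), showing the root handling described above:
//   joinPathSegments('/', 'usr', '/');         // => '/usr' (no doubled separator)
//   joinPathSegments('root', 'file.txt', '/'); // => 'root/file.txt'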
exports.joinPathSegments = joinPathSegments;

View File

@ -1,5 +0,0 @@
import type Settings from '../settings';
import type { Entry } from '../types';
export declare function read(directory: string, settings: Settings): Entry[];
export declare function readdirWithFileTypes(directory: string, settings: Settings): Entry[];
export declare function readdir(directory: string, settings: Settings): Entry[];

View File

@ -1,54 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.readdir = exports.readdirWithFileTypes = exports.read = void 0;
const fsStat = require("@nodelib/fs.stat");
const constants_1 = require("../constants");
const utils = require("../utils");
const common = require("./common");
function read(directory, settings) {
if (!settings.stats && constants_1.IS_SUPPORT_READDIR_WITH_FILE_TYPES) {
return readdirWithFileTypes(directory, settings);
}
return readdir(directory, settings);
}
exports.read = read;
function readdirWithFileTypes(directory, settings) {
const dirents = settings.fs.readdirSync(directory, { withFileTypes: true });
return dirents.map((dirent) => {
const entry = {
dirent,
name: dirent.name,
path: common.joinPathSegments(directory, dirent.name, settings.pathSegmentSeparator)
};
if (entry.dirent.isSymbolicLink() && settings.followSymbolicLinks) {
try {
const stats = settings.fs.statSync(entry.path);
entry.dirent = utils.fs.createDirentFromStats(entry.name, stats);
}
catch (error) {
if (settings.throwErrorOnBrokenSymbolicLink) {
throw error;
}
}
}
return entry;
});
}
exports.readdirWithFileTypes = readdirWithFileTypes;
function readdir(directory, settings) {
const names = settings.fs.readdirSync(directory);
return names.map((name) => {
const entryPath = common.joinPathSegments(directory, name, settings.pathSegmentSeparator);
const stats = fsStat.statSync(entryPath, settings.fsStatSettings);
const entry = {
name,
path: entryPath,
dirent: utils.fs.createDirentFromStats(name, stats)
};
if (settings.stats) {
entry.stats = stats;
}
return entry;
});
}
exports.readdir = readdir;

View File

@ -1,20 +0,0 @@
import * as fsStat from '@nodelib/fs.stat';
import * as fs from './adapters/fs';
export interface Options {
followSymbolicLinks?: boolean;
fs?: Partial<fs.FileSystemAdapter>;
pathSegmentSeparator?: string;
stats?: boolean;
throwErrorOnBrokenSymbolicLink?: boolean;
}
export default class Settings {
private readonly _options;
readonly followSymbolicLinks: boolean;
readonly fs: fs.FileSystemAdapter;
readonly pathSegmentSeparator: string;
readonly stats: boolean;
readonly throwErrorOnBrokenSymbolicLink: boolean;
readonly fsStatSettings: fsStat.Settings;
constructor(_options?: Options);
private _getValue;
}

View File

@ -1,24 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const path = require("path");
const fsStat = require("@nodelib/fs.stat");
const fs = require("./adapters/fs");
class Settings {
constructor(_options = {}) {
this._options = _options;
this.followSymbolicLinks = this._getValue(this._options.followSymbolicLinks, false);
this.fs = fs.createFileSystemAdapter(this._options.fs);
this.pathSegmentSeparator = this._getValue(this._options.pathSegmentSeparator, path.sep);
this.stats = this._getValue(this._options.stats, false);
this.throwErrorOnBrokenSymbolicLink = this._getValue(this._options.throwErrorOnBrokenSymbolicLink, true);
this.fsStatSettings = new fsStat.Settings({
followSymbolicLink: this.followSymbolicLinks,
fs: this.fs,
throwErrorOnBrokenSymbolicLink: this.throwErrorOnBrokenSymbolicLink
});
}
_getValue(option, value) {
return option !== null && option !== void 0 ? option : value;
}
}
exports.default = Settings;

View File

@ -1,20 +0,0 @@
/// <reference types="node" />
import type * as fs from 'fs';
export interface Entry {
dirent: Dirent;
name: string;
path: string;
stats?: Stats;
}
export declare type Stats = fs.Stats;
export declare type ErrnoException = NodeJS.ErrnoException;
export interface Dirent {
isBlockDevice: () => boolean;
isCharacterDevice: () => boolean;
isDirectory: () => boolean;
isFIFO: () => boolean;
isFile: () => boolean;
isSocket: () => boolean;
isSymbolicLink: () => boolean;
name: string;
}

View File

@ -1,2 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });

View File

@ -1,2 +0,0 @@
import type { Dirent, Stats } from '../types';
export declare function createDirentFromStats(name: string, stats: Stats): Dirent;

View File

@ -1,19 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.createDirentFromStats = void 0;
class DirentFromStats {
constructor(name, stats) {
this.name = name;
this.isBlockDevice = stats.isBlockDevice.bind(stats);
this.isCharacterDevice = stats.isCharacterDevice.bind(stats);
this.isDirectory = stats.isDirectory.bind(stats);
this.isFIFO = stats.isFIFO.bind(stats);
this.isFile = stats.isFile.bind(stats);
this.isSocket = stats.isSocket.bind(stats);
this.isSymbolicLink = stats.isSymbolicLink.bind(stats);
}
}
function createDirentFromStats(name, stats) {
return new DirentFromStats(name, stats);
}
exports.createDirentFromStats = createDirentFromStats;

View File

@ -1,2 +0,0 @@
import * as fs from './fs';
export { fs };

View File

@ -1,5 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.fs = void 0;
const fs = require("./fs");
exports.fs = fs;

View File

@ -1,44 +0,0 @@
{
"name": "@nodelib/fs.scandir",
"version": "2.1.5",
"description": "List files and directories inside the specified directory",
"license": "MIT",
"repository": "https://github.com/nodelib/nodelib/tree/master/packages/fs/fs.scandir",
"keywords": [
"NodeLib",
"fs",
"FileSystem",
"file system",
"scandir",
"readdir",
"dirent"
],
"engines": {
"node": ">= 8"
},
"files": [
"out/**",
"!out/**/*.map",
"!out/**/*.spec.*"
],
"main": "out/index.js",
"typings": "out/index.d.ts",
"scripts": {
"clean": "rimraf {tsconfig.tsbuildinfo,out}",
"lint": "eslint \"src/**/*.ts\" --cache",
"compile": "tsc -b .",
"compile:watch": "tsc -p . --watch --sourceMap",
"test": "mocha \"out/**/*.spec.js\" -s 0",
"build": "npm run clean && npm run compile && npm run lint && npm test",
"watch": "npm run clean && npm run compile:watch"
},
"dependencies": {
"@nodelib/fs.stat": "2.0.5",
"run-parallel": "^1.1.9"
},
"devDependencies": {
"@nodelib/fs.macchiato": "1.0.4",
"@types/run-parallel": "^1.1.0"
},
"gitHead": "d6a7960d5281d3dd5f8e2efba49bb552d090f562"
}

View File

@ -1,21 +0,0 @@
The MIT License (MIT)
Copyright (c) Denis Malinochkin
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@ -1,126 +0,0 @@
# @nodelib/fs.stat
> Get the status of a file with some features.
## :bulb: Highlights
A wrapper around the standard `fs.lstat` and `fs.stat` methods with some extra features.
* :beginner: Follows symbolic links by default.
* :gear: Can safely work with broken symbolic links.
## Install
```console
npm install @nodelib/fs.stat
```
## Usage
```ts
import * as fsStat from '@nodelib/fs.stat';
fsStat.stat('path', (error, stats) => { /* … */ });
```
## API
### .stat(path, [optionsOrSettings], callback)
Returns an instance of the `fs.Stats` class for the provided path, using the standard callback style.
```ts
fsStat.stat('path', (error, stats) => { /* … */ });
fsStat.stat('path', {}, (error, stats) => { /* … */ });
fsStat.stat('path', new fsStat.Settings(), (error, stats) => { /* … */ });
```
### .statSync(path, [optionsOrSettings])
Returns an instance of the `fs.Stats` class for the provided path.
```ts
const stats = fsStat.statSync('path');
const stats = fsStat.statSync('path', {});
const stats = fsStat.statSync('path', new fsStat.Settings());
```
#### path
* Required: `true`
* Type: `string | Buffer | URL`
A path to a file. If a URL is provided, it must use the `file:` protocol.
#### optionsOrSettings
* Required: `false`
* Type: `Options | Settings`
* Default: An instance of `Settings` class
An [`Options`](#options) object or an instance of [`Settings`](#settings) class.
> :book: When you pass a plain object, an instance of the `Settings` class will be created automatically. If you plan to call the method frequently, use a pre-created instance of the `Settings` class.
### Settings([options])
A class that holds the full settings of the package.
```ts
const settings = new fsStat.Settings({ followSymbolicLink: false });
const stats = fsStat.stat('path', settings);
```
## Options
### `followSymbolicLink`
* Type: `boolean`
* Default: `true`
Whether to follow symbolic links. If `true`, `fs.stat` is called on the symbolic link (so the stats describe its target); if `false`, the `fs.lstat` result is used.
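A minimal sketch of the difference, assuming a working symbolic link at `./link` (the path is only illustrative):
```ts
import * as fsStat from '@nodelib/fs.stat';

// Default (followSymbolicLink: true): fs.stat is called on the link,
// so the returned stats describe the target file.
fsStat.stat('./link', (error, stats) => {
	if (error) throw error;
	console.log(stats.isSymbolicLink()); // false
});

// followSymbolicLink: false keeps the fs.lstat result,
// so the returned stats describe the link itself.
const settings = new fsStat.Settings({ followSymbolicLink: false });
fsStat.stat('./link', settings, (error, stats) => {
	if (error) throw error;
	console.log(stats.isSymbolicLink()); // true
});
```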
### `markSymbolicLink`
* Type: `boolean`
* Default: `false`
Marks symbolic links by forcing the `isSymbolicLink` method of the returned stats to report `true` (even after `fs.stat` has followed the link).
> :book: Useful when you want to know what is behind a symbolic link while still being able to tell that it is a symbolic link.
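For example, a sketch under the same assumption (`./link` is a working symbolic link):
```ts
import * as fsStat from '@nodelib/fs.stat';

// The link is still followed (followSymbolicLink defaults to true),
// so size, mtime, etc. describe the target, but isSymbolicLink()
// is forced to report true.
const settings = new fsStat.Settings({ markSymbolicLink: true });
fsStat.stat('./link', settings, (error, stats) => {
	if (error) throw error;
	console.log(stats.isSymbolicLink()); // true
});
```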
### `throwErrorOnBrokenSymbolicLink`
* Type: `boolean`
* Default: `true`
If `true`, throws an error when a symbolic link is broken; if `false`, safely returns the result of the `lstat` call instead.
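A sketch of both behaviours, assuming `./broken-link` is a symbolic link whose target no longer exists (the path is only illustrative):
```ts
import * as fsStat from '@nodelib/fs.stat';

// Default (true): following the broken link fails, so the fs.stat
// error is passed to the callback.
fsStat.stat('./broken-link', (error, stats) => {
	console.log(error && error.code); // 'ENOENT'
});

// false: the fs.stat error is swallowed and the earlier fs.lstat
// result (describing the link itself) is returned instead.
const settings = new fsStat.Settings({ throwErrorOnBrokenSymbolicLink: false });
fsStat.stat('./broken-link', settings, (error, stats) => {
	if (error) throw error;
	console.log(stats.isSymbolicLink()); // true
});
```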
### `fs`
* Type: [`FileSystemAdapter`](./src/adapters/fs.ts)
* Default: The built-in Node.js `fs` methods
By default, the built-in Node.js module (`fs`) is used to work with the file system. You can replace any method with your own.
```ts
interface FileSystemAdapter {
lstat?: typeof fs.lstat;
stat?: typeof fs.stat;
lstatSync?: typeof fs.lstatSync;
statSync?: typeof fs.statSync;
}
const settings = new fsStat.Settings({
fs: { lstat: fakeLstat }
});
```
## Changelog
See the [Releases section of our GitHub project](https://github.com/nodelib/nodelib/releases) for changelog for each release version.
## License
This software is released under the terms of the MIT license.

View File

@ -1,13 +0,0 @@
/// <reference types="node" />
import * as fs from 'fs';
import type { ErrnoException } from '../types';
export declare type StatAsynchronousMethod = (path: string, callback: (error: ErrnoException | null, stats: fs.Stats) => void) => void;
export declare type StatSynchronousMethod = (path: string) => fs.Stats;
export interface FileSystemAdapter {
lstat: StatAsynchronousMethod;
stat: StatAsynchronousMethod;
lstatSync: StatSynchronousMethod;
statSync: StatSynchronousMethod;
}
export declare const FILE_SYSTEM_ADAPTER: FileSystemAdapter;
export declare function createFileSystemAdapter(fsMethods?: Partial<FileSystemAdapter>): FileSystemAdapter;

View File

@ -1,17 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.createFileSystemAdapter = exports.FILE_SYSTEM_ADAPTER = void 0;
const fs = require("fs");
exports.FILE_SYSTEM_ADAPTER = {
lstat: fs.lstat,
stat: fs.stat,
lstatSync: fs.lstatSync,
statSync: fs.statSync
};
function createFileSystemAdapter(fsMethods) {
if (fsMethods === undefined) {
return exports.FILE_SYSTEM_ADAPTER;
}
return Object.assign(Object.assign({}, exports.FILE_SYSTEM_ADAPTER), fsMethods);
}
exports.createFileSystemAdapter = createFileSystemAdapter;

View File

@ -1,12 +0,0 @@
import type { FileSystemAdapter, StatAsynchronousMethod, StatSynchronousMethod } from './adapters/fs';
import * as async from './providers/async';
import Settings, { Options } from './settings';
import type { Stats } from './types';
declare type AsyncCallback = async.AsyncCallback;
declare function stat(path: string, callback: AsyncCallback): void;
declare function stat(path: string, optionsOrSettings: Options | Settings, callback: AsyncCallback): void;
declare namespace stat {
function __promisify__(path: string, optionsOrSettings?: Options | Settings): Promise<Stats>;
}
declare function statSync(path: string, optionsOrSettings?: Options | Settings): Stats;
export { Settings, stat, statSync, AsyncCallback, FileSystemAdapter, StatAsynchronousMethod, StatSynchronousMethod, Options, Stats };

View File

@ -1,26 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.statSync = exports.stat = exports.Settings = void 0;
const async = require("./providers/async");
const sync = require("./providers/sync");
const settings_1 = require("./settings");
exports.Settings = settings_1.default;
function stat(path, optionsOrSettingsOrCallback, callback) {
if (typeof optionsOrSettingsOrCallback === 'function') {
async.read(path, getSettings(), optionsOrSettingsOrCallback);
return;
}
async.read(path, getSettings(optionsOrSettingsOrCallback), callback);
}
exports.stat = stat;
function statSync(path, optionsOrSettings) {
const settings = getSettings(optionsOrSettings);
return sync.read(path, settings);
}
exports.statSync = statSync;
function getSettings(settingsOrOptions = {}) {
if (settingsOrOptions instanceof settings_1.default) {
return settingsOrOptions;
}
return new settings_1.default(settingsOrOptions);
}

Some files were not shown because too many files have changed in this diff.